Oct 09 00:06:41 crc systemd[1]: Starting Kubernetes Kubelet... Oct 09 00:06:41 crc restorecon[4668]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc 
restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 00:06:41 crc 
restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc 
restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc 
restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 
crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 
00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 
00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 00:06:41 crc 
restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:41 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 
00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 
00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc 
restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 00:06:42 crc restorecon[4668]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 09 00:06:42 crc kubenswrapper[4810]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 00:06:42 crc kubenswrapper[4810]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 09 00:06:42 crc kubenswrapper[4810]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 00:06:42 crc kubenswrapper[4810]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 09 00:06:42 crc kubenswrapper[4810]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 09 00:06:42 crc kubenswrapper[4810]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 00:06:42 crc kubenswrapper[4810]: I1009 00:06:42.993226 4810 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999293 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999326 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999339 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999349 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999358 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999367 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999374 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999382 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999390 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999398 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999409 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999419 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999428 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999436 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999445 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999454 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999462 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999470 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999481 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 00:06:42 crc kubenswrapper[4810]: W1009 00:06:42.999489 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999498 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999508 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999517 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999524 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999544 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999552 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999560 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999567 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999576 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999583 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999591 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999598 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999605 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999613 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999621 4810 feature_gate.go:330] unrecognized feature gate: Example Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999628 4810 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999636 4810 feature_gate.go:330] 
unrecognized feature gate: MetricsCollectionProfiles Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999644 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999652 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999660 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999671 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999680 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999688 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999696 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999704 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999713 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999723 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999731 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999739 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999746 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999754 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999762 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999769 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999779 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999787 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999795 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999802 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999810 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999817 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999851 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999860 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999867 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 
00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999875 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999882 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999889 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999898 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999905 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999913 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999920 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999928 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:42.999938 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001020 4810 flags.go:64] FLAG: --address="0.0.0.0" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001041 4810 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001074 4810 flags.go:64] FLAG: --anonymous-auth="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001093 4810 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001105 4810 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001115 4810 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001127 4810 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001155 4810 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001164 4810 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001173 4810 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001184 4810 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001193 4810 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001202 4810 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001211 4810 flags.go:64] FLAG: --cgroup-root="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001219 4810 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001228 4810 flags.go:64] FLAG: --client-ca-file="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001237 4810 flags.go:64] FLAG: --cloud-config="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001246 4810 flags.go:64] FLAG: --cloud-provider="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001255 4810 flags.go:64] FLAG: --cluster-dns="[]" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 
00:06:43.001270 4810 flags.go:64] FLAG: --cluster-domain="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001280 4810 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001290 4810 flags.go:64] FLAG: --config-dir="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001300 4810 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001310 4810 flags.go:64] FLAG: --container-log-max-files="5" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001322 4810 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001331 4810 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001340 4810 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001350 4810 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001359 4810 flags.go:64] FLAG: --contention-profiling="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001367 4810 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001378 4810 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001387 4810 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001396 4810 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001414 4810 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001423 4810 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001431 4810 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001440 4810 flags.go:64] FLAG: --enable-load-reader="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001449 4810 flags.go:64] FLAG: --enable-server="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001458 4810 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001476 4810 flags.go:64] FLAG: --event-burst="100" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001485 4810 flags.go:64] FLAG: --event-qps="50" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001494 4810 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001503 4810 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001524 4810 flags.go:64] FLAG: --eviction-hard="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001535 4810 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001543 4810 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001552 4810 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001562 4810 flags.go:64] FLAG: --eviction-soft="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001570 4810 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 09 00:06:43 crc 
kubenswrapper[4810]: I1009 00:06:43.001579 4810 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001588 4810 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001597 4810 flags.go:64] FLAG: --experimental-mounter-path="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001605 4810 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001614 4810 flags.go:64] FLAG: --fail-swap-on="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001623 4810 flags.go:64] FLAG: --feature-gates="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001633 4810 flags.go:64] FLAG: --file-check-frequency="20s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001642 4810 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001651 4810 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001660 4810 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001669 4810 flags.go:64] FLAG: --healthz-port="10248" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001678 4810 flags.go:64] FLAG: --help="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001687 4810 flags.go:64] FLAG: --hostname-override="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001696 4810 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001705 4810 flags.go:64] FLAG: --http-check-frequency="20s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001714 4810 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001722 4810 flags.go:64] FLAG: --image-credential-provider-config="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001731 4810 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001740 4810 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001748 4810 flags.go:64] FLAG: --image-service-endpoint="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001757 4810 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001765 4810 flags.go:64] FLAG: --kube-api-burst="100" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001774 4810 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001784 4810 flags.go:64] FLAG: --kube-api-qps="50" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001792 4810 flags.go:64] FLAG: --kube-reserved="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001801 4810 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001810 4810 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001844 4810 flags.go:64] FLAG: --kubelet-cgroups="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001853 4810 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001862 4810 flags.go:64] FLAG: --lock-file="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 
00:06:43.001883 4810 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001892 4810 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001901 4810 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001915 4810 flags.go:64] FLAG: --log-json-split-stream="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001924 4810 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001933 4810 flags.go:64] FLAG: --log-text-split-stream="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001941 4810 flags.go:64] FLAG: --logging-format="text" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001950 4810 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001960 4810 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001968 4810 flags.go:64] FLAG: --manifest-url="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001977 4810 flags.go:64] FLAG: --manifest-url-header="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001989 4810 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.001997 4810 flags.go:64] FLAG: --max-open-files="1000000" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002009 4810 flags.go:64] FLAG: --max-pods="110" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002019 4810 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002030 4810 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002039 4810 flags.go:64] FLAG: --memory-manager-policy="None" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002047 4810 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002056 4810 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002066 4810 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002074 4810 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002093 4810 flags.go:64] FLAG: --node-status-max-images="50" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002102 4810 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002111 4810 flags.go:64] FLAG: --oom-score-adj="-999" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002120 4810 flags.go:64] FLAG: --pod-cidr="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002129 4810 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002142 4810 flags.go:64] FLAG: --pod-manifest-path="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002151 4810 flags.go:64] FLAG: --pod-max-pids="-1" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002159 4810 flags.go:64] FLAG: --pods-per-core="0" Oct 09 
00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002169 4810 flags.go:64] FLAG: --port="10250" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002178 4810 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002186 4810 flags.go:64] FLAG: --provider-id="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002195 4810 flags.go:64] FLAG: --qos-reserved="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002203 4810 flags.go:64] FLAG: --read-only-port="10255" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002213 4810 flags.go:64] FLAG: --register-node="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002222 4810 flags.go:64] FLAG: --register-schedulable="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002242 4810 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002257 4810 flags.go:64] FLAG: --registry-burst="10" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002266 4810 flags.go:64] FLAG: --registry-qps="5" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002274 4810 flags.go:64] FLAG: --reserved-cpus="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002284 4810 flags.go:64] FLAG: --reserved-memory="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002295 4810 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002303 4810 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002312 4810 flags.go:64] FLAG: --rotate-certificates="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002321 4810 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002330 4810 flags.go:64] FLAG: --runonce="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002344 4810 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002354 4810 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002362 4810 flags.go:64] FLAG: --seccomp-default="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002371 4810 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002380 4810 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002390 4810 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002398 4810 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002407 4810 flags.go:64] FLAG: --storage-driver-password="root" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002416 4810 flags.go:64] FLAG: --storage-driver-secure="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002425 4810 flags.go:64] FLAG: --storage-driver-table="stats" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002433 4810 flags.go:64] FLAG: --storage-driver-user="root" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002442 4810 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002451 4810 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 
00:06:43.002460 4810 flags.go:64] FLAG: --system-cgroups="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002468 4810 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002482 4810 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002491 4810 flags.go:64] FLAG: --tls-cert-file="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002499 4810 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002925 4810 flags.go:64] FLAG: --tls-min-version="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002935 4810 flags.go:64] FLAG: --tls-private-key-file="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002944 4810 flags.go:64] FLAG: --topology-manager-policy="none" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002954 4810 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002964 4810 flags.go:64] FLAG: --topology-manager-scope="container" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002973 4810 flags.go:64] FLAG: --v="2" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.002984 4810 flags.go:64] FLAG: --version="false" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.003006 4810 flags.go:64] FLAG: --vmodule="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.003029 4810 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.003039 4810 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003261 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003271 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003280 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003288 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003300 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003308 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003317 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003324 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003334 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003342 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003349 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003358 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003365 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003376 4810 
feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003385 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003394 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003403 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003411 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003419 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003427 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003437 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003445 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003453 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003461 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003469 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003476 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003484 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003491 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003499 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003506 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003514 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003521 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003529 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003537 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003557 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003567 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003576 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003583 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003591 4810 feature_gate.go:330] unrecognized feature gate: 
UpgradeStatus Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003599 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003606 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003614 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003621 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003629 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003637 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003645 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003652 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003660 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003667 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003675 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003683 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003691 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003698 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003706 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003714 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003722 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003729 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003739 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003749 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003759 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003768 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003777 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003786 4810 feature_gate.go:330] unrecognized feature gate: Example Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003794 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003802 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003810 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003844 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003853 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003861 4810 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003869 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.003889 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.003912 4810 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.014848 4810 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.014883 4810 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.014990 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015004 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015011 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015018 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015024 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015029 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015034 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015040 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015044 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015049 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015054 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015059 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015063 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015068 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015073 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015078 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015084 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015090 4810 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015095 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015100 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015106 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015112 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015117 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015122 4810 feature_gate.go:330] unrecognized feature gate: Example Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015127 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015132 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015137 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015141 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015146 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 00:06:43 crc 
kubenswrapper[4810]: W1009 00:06:43.015151 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015156 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015160 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015165 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015170 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015177 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015183 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015189 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015195 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015200 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015205 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015210 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015215 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015220 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015233 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015239 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015248 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015260 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015267 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015274 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015280 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015286 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015290 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015295 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015300 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 
00:06:43.015305 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015310 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015317 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015327 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015332 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015338 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015344 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015349 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015355 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015360 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015365 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015370 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015375 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015382 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015388 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015393 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015399 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.015408 4810 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015580 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015591 4810 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015597 4810 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015602 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015607 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015612 4810 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015617 4810 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015623 4810 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015628 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015633 4810 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015638 4810 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015643 4810 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015649 4810 feature_gate.go:330] unrecognized feature gate: Example Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015654 4810 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015660 4810 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015668 4810 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015673 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015678 4810 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015684 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015690 4810 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015696 4810 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015700 4810 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015705 4810 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015710 4810 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015715 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015720 4810 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015725 4810 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015731 4810 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015737 4810 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015743 4810 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015748 4810 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015753 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015758 4810 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015763 4810 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015768 4810 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015773 4810 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015778 4810 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015783 4810 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015787 4810 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015792 4810 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015797 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015802 4810 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015807 4810 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015812 4810 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015851 4810 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015857 4810 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015861 4810 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015866 4810 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015871 4810 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015876 4810 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015881 4810 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015886 4810 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015891 4810 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015896 4810 feature_gate.go:330] unrecognized 
feature gate: NewOLM Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015901 4810 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015906 4810 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015912 4810 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015920 4810 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015932 4810 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015939 4810 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015946 4810 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015952 4810 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015958 4810 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015963 4810 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015969 4810 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015974 4810 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015978 4810 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015985 4810 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015992 4810 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.015999 4810 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.016006 4810 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.016014 4810 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.017138 4810 server.go:940] "Client rotation is on, will bootstrap in background" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.021636 4810 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.021722 4810 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.023979 4810 server.go:997] "Starting client certificate rotation" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.024010 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.025283 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-11 11:43:47.9859977 +0000 UTC Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.025381 4810 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2267h37m4.960621135s for next certificate rotation Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.056099 4810 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.062154 4810 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.078978 4810 log.go:25] "Validated CRI v1 runtime API" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.120894 4810 log.go:25] "Validated CRI v1 image API" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.123645 4810 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.129848 4810 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-09-00-01-32-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.129895 4810 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.155362 4810 manager.go:217] Machine: {Timestamp:2025-10-09 00:06:43.152039419 +0000 UTC m=+0.677678170 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:d0ba6a9a-46d9-4e3d-9bad-8681de143186 BootID:7658369b-ccb8-43ff-a2da-9dae70b9fe9a Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 
Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:6b:70:2c Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:6b:70:2c Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e1:24:6f Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:e8:15:5c Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:90:66:7e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:12:c7:a3 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:da:bb:f2:cb:41:78 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:66:ad:b1:b0:35:82 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: 
DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.155658 4810 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.155808 4810 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.158066 4810 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.158277 4810 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.158330 4810 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.160228 4810 topology_manager.go:138] "Creating topology manager with none policy" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.160255 4810 container_manager_linux.go:303] "Creating device plugin manager" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.160760 4810 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.160791 4810 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 09 
00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.161027 4810 state_mem.go:36] "Initialized new in-memory state store" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.161126 4810 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.165458 4810 kubelet.go:418] "Attempting to sync node with API server" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.165483 4810 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.165501 4810 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.165516 4810 kubelet.go:324] "Adding apiserver pod source" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.165530 4810 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.171341 4810 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.172467 4810 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.173914 4810 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.174510 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.174634 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.174512 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.174709 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175879 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175914 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175927 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175939 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 09 00:06:43 crc kubenswrapper[4810]: 
I1009 00:06:43.175958 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175970 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175981 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.175996 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.176006 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.176019 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.176031 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.176042 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.178538 4810 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.179432 4810 server.go:1280] "Started kubelet" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.180559 4810 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 09 00:06:43 crc systemd[1]: Started Kubernetes Kubelet. Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.180511 4810 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.187101 4810 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.188528 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.188584 4810 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.188685 4810 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.188692 4810 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 08:39:12.451397085 +0000 UTC Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.188739 4810 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1856h32m29.262660073s for next certificate rotation Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.189216 4810 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.189249 4810 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.189290 4810 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.189456 4810 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.193158 4810 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="200ms" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.193244 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193348 4810 factory.go:153] Registering CRI-O factory Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.193380 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193412 4810 factory.go:221] Registration of the crio container factory successfully Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193509 4810 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193527 4810 factory.go:55] Registering systemd factory Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193540 4810 factory.go:221] Registration of the systemd container factory successfully Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193569 4810 factory.go:103] Registering Raw factory Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.193594 4810 manager.go:1196] Started watching for new ooms in manager Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.196004 4810 manager.go:319] Starting recovery of all containers Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.193886 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186ca9f01f65f385 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-09 00:06:43.179041669 +0000 UTC m=+0.704680410,LastTimestamp:2025-10-09 00:06:43.179041669 +0000 UTC m=+0.704680410,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.201860 4810 server.go:460] "Adding debug handlers to kubelet server" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210355 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210455 4810 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210469 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210501 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210516 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210529 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210543 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210576 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210592 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210607 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210619 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210632 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210665 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210680 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210691 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210706 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210738 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210856 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210875 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210889 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210901 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210937 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210948 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210960 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.210975 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211010 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211029 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211044 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211056 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211089 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211101 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211119 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211136 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211180 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211199 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211212 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211227 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211260 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211272 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211287 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211300 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211334 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211349 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211362 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211375 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211388 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211423 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211438 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211450 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211462 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211498 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211518 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211544 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211585 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211598 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211614 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211626 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211659 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211672 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211684 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211696 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211707 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211741 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211755 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211767 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211778 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211789 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211834 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211851 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211866 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211901 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211913 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.211950 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212030 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212047 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212058 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212070 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212081 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212094 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212105 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212116 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212149 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212163 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212175 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212186 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212201 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212213 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212225 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212236 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212249 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212265 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212283 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212300 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212313 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212325 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212383 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212415 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212434 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212451 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212465 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212481 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212498 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212514 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212531 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212556 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212576 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212595 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212618 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212637 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212654 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212671 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212691 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212710 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212765 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212787 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212806 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212843 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212864 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212880 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212898 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212919 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212936 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212953 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212973 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.212991 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213007 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213023 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213043 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213060 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213076 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213094 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213110 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213127 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213141 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213159 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213174 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213190 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213205 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213222 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213237 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213252 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213267 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213283 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213299 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213315 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213329 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213344 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213359 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213373 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213395 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.213412 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.216903 4810 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.216941 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.216962 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.216979 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.216996 4810 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217013 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217031 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217047 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217064 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217081 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217098 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217114 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217132 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217147 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217163 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217180 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217197 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217214 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217229 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217247 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217263 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217279 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217295 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217310 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217330 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217346 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217362 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217378 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217392 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217407 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217427 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217444 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217493 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217510 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217524 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217539 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217555 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217572 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217589 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217612 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217633 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217649 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217661 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217675 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217688 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217774 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217796 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217814 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217850 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217867 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217884 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217900 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217919 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217937 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217954 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217972 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.217991 4810 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.218045 4810 reconstruct.go:97] "Volume reconstruction finished" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.218057 4810 reconciler.go:26] "Reconciler: start to sync state" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.228418 4810 manager.go:324] Recovery completed Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.239936 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.241733 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.241777 4810 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.241790 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.242483 4810 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.242530 4810 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.242556 4810 state_mem.go:36] "Initialized new in-memory state store" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.248886 4810 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.251540 4810 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.252516 4810 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.252565 4810 kubelet.go:2335] "Starting kubelet main sync loop" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.252626 4810 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.257205 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.257316 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.263456 4810 policy_none.go:49] "None policy: Start" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.264875 4810 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.264907 4810 state_mem.go:35] "Initializing new in-memory state store" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.290461 4810 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.317752 4810 manager.go:334] "Starting Device Plugin manager" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.317797 4810 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.317811 4810 server.go:79] "Starting device plugin registration server" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.318179 4810 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.318195 4810 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.318357 4810 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 09 00:06:43 crc 
kubenswrapper[4810]: I1009 00:06:43.318467 4810 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.318477 4810 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.329914 4810 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.353347 4810 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.353471 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.354917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.354965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.354978 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.355196 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.355619 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.355682 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356058 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356069 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356186 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356342 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356383 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356747 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.356761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.357301 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.357329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.357340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.357441 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.357981 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.358020 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.358356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.358383 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.358395 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.361618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.361657 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.361672 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.363624 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.363664 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.363682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.363878 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc 
kubenswrapper[4810]: I1009 00:06:43.364023 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.364094 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.365458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.365498 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.365513 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.367521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.367565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.367580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.367893 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.367947 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.369038 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.369074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.369089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.394058 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="400ms" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.418476 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.419605 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.419645 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.419659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.419686 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420179 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420268 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.420269 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420337 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420388 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420427 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420459 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420488 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420519 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420549 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.420724 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.421023 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.421064 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.421107 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.421160 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.421211 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522607 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522654 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522688 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522719 4810 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522749 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522779 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522809 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522859 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522871 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522904 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522955 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522963 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523001 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522988 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523023 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523023 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523045 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.522959 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523139 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523176 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523211 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523245 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523265 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523276 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523303 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523343 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523339 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523328 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523407 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.523353 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.620536 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.622202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.622271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.622294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.622341 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.623382 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.696067 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.706050 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.721951 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.739597 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: I1009 00:06:43.746557 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.753458 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-04bfacfdde35ed42cec91f18084bd71cbb468bf67d79e0833682bcfd4d156a80 WatchSource:0}: Error finding container 04bfacfdde35ed42cec91f18084bd71cbb468bf67d79e0833682bcfd4d156a80: Status 404 returned error can't find the container with id 04bfacfdde35ed42cec91f18084bd71cbb468bf67d79e0833682bcfd4d156a80 Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.761742 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-74df6ff8778e1028cff307527118fd2b60bfaf2527fedbb884da63271d4c3691 WatchSource:0}: Error finding container 74df6ff8778e1028cff307527118fd2b60bfaf2527fedbb884da63271d4c3691: Status 404 returned error can't find the container with id 74df6ff8778e1028cff307527118fd2b60bfaf2527fedbb884da63271d4c3691 Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.776994 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-5479f8033891e2b33232a953cb111c8e1610d9fc3dafb154105c797d4d8685e5 WatchSource:0}: Error finding container 5479f8033891e2b33232a953cb111c8e1610d9fc3dafb154105c797d4d8685e5: Status 404 returned error can't find the container with id 5479f8033891e2b33232a953cb111c8e1610d9fc3dafb154105c797d4d8685e5 Oct 09 00:06:43 crc kubenswrapper[4810]: W1009 00:06:43.779781 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-c3d57a587f85c9ab1724b5f2e883592651946e3c930b5779569b03f0cd1fde29 WatchSource:0}: Error finding container c3d57a587f85c9ab1724b5f2e883592651946e3c930b5779569b03f0cd1fde29: Status 404 returned error can't find the container with id c3d57a587f85c9ab1724b5f2e883592651946e3c930b5779569b03f0cd1fde29 Oct 09 00:06:43 crc kubenswrapper[4810]: E1009 00:06:43.796848 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" 
interval="800ms" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.024417 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.026323 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.026371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.026382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.026412 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.027035 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Oct 09 00:06:44 crc kubenswrapper[4810]: W1009 00:06:44.150500 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.150637 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.189457 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:44 crc kubenswrapper[4810]: W1009 00:06:44.220050 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.220144 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.257912 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3abe9060d998c3aa9e16f85b636f46e078a3ce5ffb954917555677e31f179626"} Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.259524 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"04bfacfdde35ed42cec91f18084bd71cbb468bf67d79e0833682bcfd4d156a80"} Oct 09 00:06:44 crc 
kubenswrapper[4810]: I1009 00:06:44.261091 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5479f8033891e2b33232a953cb111c8e1610d9fc3dafb154105c797d4d8685e5"} Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.262884 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"c3d57a587f85c9ab1724b5f2e883592651946e3c930b5779569b03f0cd1fde29"} Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.263894 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"74df6ff8778e1028cff307527118fd2b60bfaf2527fedbb884da63271d4c3691"} Oct 09 00:06:44 crc kubenswrapper[4810]: W1009 00:06:44.415156 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.415257 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.554960 4810 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.177:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186ca9f01f65f385 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-09 00:06:43.179041669 +0000 UTC m=+0.704680410,LastTimestamp:2025-10-09 00:06:43.179041669 +0000 UTC m=+0.704680410,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.597783 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="1.6s" Oct 09 00:06:44 crc kubenswrapper[4810]: W1009 00:06:44.654166 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.654299 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection 
refused" logger="UnhandledError" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.828927 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.831000 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.831058 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.831078 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:44 crc kubenswrapper[4810]: I1009 00:06:44.831111 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 00:06:44 crc kubenswrapper[4810]: E1009 00:06:44.831794 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.189891 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.268707 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923" exitCode=0 Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.268875 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923"} Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.268970 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.270612 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.270661 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.270681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.271962 4810 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c" exitCode=0 Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.272022 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c"} Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.272155 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.273691 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 
09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.273742 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.273762 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.274674 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.275033 4810 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303" exitCode=0 Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.275079 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303"} Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.275150 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.276808 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.276901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.276920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.279057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.279097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.279115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.282939 4810 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c" exitCode=0 Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.283086 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c"} Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.283199 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.284649 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.284695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.284716 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:45 
crc kubenswrapper[4810]: I1009 00:06:45.289272 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e"} Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.289329 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde"} Oct 09 00:06:45 crc kubenswrapper[4810]: I1009 00:06:45.289354 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534"} Oct 09 00:06:46 crc kubenswrapper[4810]: W1009 00:06:46.043188 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:46 crc kubenswrapper[4810]: E1009 00:06:46.043267 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.189380 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:46 crc kubenswrapper[4810]: E1009 00:06:46.198921 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.177:6443: connect: connection refused" interval="3.2s" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.294735 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0e58cdd9f168937a38507c024471b5f08f253cd37b58a02ebc4e7e9622d5b44b"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.294916 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.296358 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.296382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.296394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.298589 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.298918 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.299166 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.298681 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.301079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.301097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.301108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.302154 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.302281 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.305846 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.305867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.305875 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.307035 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.307062 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.307071 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.307081 4810 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.309436 4810 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759" exitCode=0 Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.309500 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759"} Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.309938 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.311204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.311227 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.311266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.432731 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.434009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.434054 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.434067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:46 crc kubenswrapper[4810]: I1009 00:06:46.434097 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 00:06:46 crc kubenswrapper[4810]: E1009 00:06:46.434582 4810 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.177:6443: connect: connection refused" node="crc" Oct 09 00:06:46 crc kubenswrapper[4810]: W1009 00:06:46.730454 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: connection refused Oct 09 00:06:46 crc kubenswrapper[4810]: E1009 00:06:46.730568 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:46 crc kubenswrapper[4810]: W1009 00:06:46.895143 4810 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.177:6443: connect: 
connection refused Oct 09 00:06:46 crc kubenswrapper[4810]: E1009 00:06:46.895249 4810 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.177:6443: connect: connection refused" logger="UnhandledError" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.021970 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.209042 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.316467 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544"} Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.316706 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.318124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.318166 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.318183 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.320899 4810 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32" exitCode=0 Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.320945 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32"} Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.321027 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.321088 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.321155 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.321194 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.321095 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.322525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.322553 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.322564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323192 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323221 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323311 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:47 crc kubenswrapper[4810]: I1009 00:06:47.323490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.333941 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.334018 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.334045 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.333931 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20"} Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.334234 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336"} Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.334273 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c"} Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.334300 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc"} Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.334028 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.335326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.335376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.335404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.335679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.335744 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.335772 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.336411 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.336470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.336490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:48 crc kubenswrapper[4810]: I1009 00:06:48.684644 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.343567 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.343644 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.343692 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.343517 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884"} Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.345278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.345344 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.345363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.345356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.345545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.345564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.635574 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.637728 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.637807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.637862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:49 crc kubenswrapper[4810]: I1009 00:06:49.637907 4810 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 00:06:50 crc kubenswrapper[4810]: I1009 00:06:50.112375 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 09 00:06:50 crc kubenswrapper[4810]: I1009 00:06:50.346657 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:50 crc kubenswrapper[4810]: I1009 00:06:50.348029 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:50 crc kubenswrapper[4810]: I1009 00:06:50.348105 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:50 crc kubenswrapper[4810]: I1009 00:06:50.348130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.140595 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.141085 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.142603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.142668 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.142694 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.349045 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.350682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.350761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.350802 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.889359 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.889564 4810 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" 
Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.889620 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.891327 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.891401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:51 crc kubenswrapper[4810]: I1009 00:06:51.891421 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:52 crc kubenswrapper[4810]: I1009 00:06:52.029073 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:52 crc kubenswrapper[4810]: I1009 00:06:52.351645 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:52 crc kubenswrapper[4810]: I1009 00:06:52.353130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:52 crc kubenswrapper[4810]: I1009 00:06:52.353192 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:52 crc kubenswrapper[4810]: I1009 00:06:52.353230 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:53 crc kubenswrapper[4810]: E1009 00:06:53.330061 4810 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 09 00:06:53 crc kubenswrapper[4810]: I1009 00:06:53.888944 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:53 crc kubenswrapper[4810]: I1009 00:06:53.889171 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:53 crc kubenswrapper[4810]: I1009 00:06:53.890729 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:53 crc kubenswrapper[4810]: I1009 00:06:53.890860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:53 crc kubenswrapper[4810]: I1009 00:06:53.890891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:53 crc kubenswrapper[4810]: I1009 00:06:53.900513 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.141039 4810 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.141178 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get 
\"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.356582 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.357509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.357542 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.357554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:54 crc kubenswrapper[4810]: I1009 00:06:54.361148 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.359812 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.361379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.361442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.361452 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.690093 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.690362 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.692015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.692074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:55 crc kubenswrapper[4810]: I1009 00:06:55.692092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.190116 4810 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.355378 4810 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.355431 4810 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.362735 4810 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.362948 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.366237 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.368267 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544" exitCode=255 Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.368408 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544"} Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.368655 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.370171 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.370219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.370233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:57 crc kubenswrapper[4810]: I1009 00:06:57.370931 4810 scope.go:117] "RemoveContainer" containerID="a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544" Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.373239 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.374899 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c"} Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.375037 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.376503 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.376560 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.376580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:58 crc kubenswrapper[4810]: I1009 00:06:58.691858 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:59 crc kubenswrapper[4810]: I1009 00:06:59.377023 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:06:59 crc kubenswrapper[4810]: I1009 00:06:59.377148 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:06:59 crc kubenswrapper[4810]: I1009 00:06:59.377671 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:06:59 crc kubenswrapper[4810]: I1009 00:06:59.377714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:06:59 crc kubenswrapper[4810]: I1009 00:06:59.377722 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:06:59 crc kubenswrapper[4810]: I1009 00:06:59.380689 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:07:00 crc kubenswrapper[4810]: I1009 00:07:00.380052 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:07:00 crc kubenswrapper[4810]: I1009 00:07:00.381744 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:00 crc kubenswrapper[4810]: I1009 00:07:00.381847 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:00 crc kubenswrapper[4810]: I1009 00:07:00.381864 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:01 crc kubenswrapper[4810]: I1009 00:07:01.382095 4810 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 00:07:01 crc kubenswrapper[4810]: I1009 00:07:01.383727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:01 crc kubenswrapper[4810]: I1009 00:07:01.383771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:01 crc kubenswrapper[4810]: I1009 00:07:01.383782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.348985 4810 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.352381 4810 trace.go:236] Trace[1227502756]: "Reflector 
ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 00:06:52.066) (total time: 10285ms): Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[1227502756]: ---"Objects listed" error: 10285ms (00:07:02.352) Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[1227502756]: [10.285713817s] [10.285713817s] END Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.352430 4810 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358203 4810 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358328 4810 trace.go:236] Trace[1895514263]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 00:06:47.826) (total time: 14532ms): Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[1895514263]: ---"Objects listed" error: 14532ms (00:07:02.358) Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[1895514263]: [14.532153953s] [14.532153953s] END Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358363 4810 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358443 4810 trace.go:236] Trace[272058421]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 00:06:49.881) (total time: 12476ms): Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[272058421]: ---"Objects listed" error: 12476ms (00:07:02.358) Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[272058421]: [12.476371892s] [12.476371892s] END Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358478 4810 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358546 4810 trace.go:236] Trace[902448997]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 00:06:50.247) (total time: 12111ms): Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[902448997]: ---"Objects listed" error: 12110ms (00:07:02.358) Oct 09 00:07:02 crc kubenswrapper[4810]: Trace[902448997]: [12.111010675s] [12.111010675s] END Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.358568 4810 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.362580 4810 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.363004 4810 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.364932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.364973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.364984 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.365004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.365015 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.379341 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.384996 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.385032 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 
00:07:02.385041 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.385057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.385070 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.407915 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.412660 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.413037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 
00:07:02.413232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.413427 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.413606 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.430356 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.431224 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.437866 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.438587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc 
kubenswrapper[4810]: I1009 00:07:02.438631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.438647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.438670 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.438684 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.447991 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.452981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.453033 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 
00:07:02.453046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.453071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.453084 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.462430 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:02 crc kubenswrapper[4810]: E1009 00:07:02.462534 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.464092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 
00:07:02.464115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.464124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.464141 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.464151 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.567178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.567227 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.567240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.567263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.567277 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.669081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.669125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.669142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.669162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.669173 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.771776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.771838 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.771851 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.771892 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.771906 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.874920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.874965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.874981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.875007 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.875023 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.977415 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.977454 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.977467 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.977487 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:02 crc kubenswrapper[4810]: I1009 00:07:02.977499 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:02Z","lastTransitionTime":"2025-10-09T00:07:02Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.080481 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.080526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.080535 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.080553 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.080565 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.176242 4810 apiserver.go:52] "Watching apiserver" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.179254 4810 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.179683 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-bw4pj","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-multus/multus-additional-cni-plugins-lzf84","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-ovn-kubernetes/ovnkube-node-sd2lj","openshift-machine-config-operator/machine-config-daemon-6752w","openshift-multus/multus-vrlxd","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180038 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180137 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180044 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180207 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180214 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180222 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180164 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.180292 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.180572 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.180814 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.181101 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.181255 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.181344 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.181798 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.182459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.182484 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.182494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.182510 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.182520 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.183354 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.184928 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.185269 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.185299 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.185342 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.186682 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.187406 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.187617 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188108 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188128 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188262 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188113 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188467 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188343 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188597 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188898 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.188995 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189039 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189091 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 09 00:07:03 crc 
kubenswrapper[4810]: I1009 00:07:03.188955 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189220 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189297 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189360 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189369 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189398 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189644 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.189738 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.190070 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.190367 4810 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.190420 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.190510 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.192085 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.203670 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.213783 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.227493 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.238814 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.245076 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.254092 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263409 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263460 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263488 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263513 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263536 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263560 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263552 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263591 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 
00:07:03.263619 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263645 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263667 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263688 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263711 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263732 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263756 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263782 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263810 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263850 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: 
\"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263877 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263899 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263922 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263944 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263966 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.263989 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264011 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264032 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264057 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264061 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod 
"bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264083 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264109 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264133 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264156 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264179 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264202 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264227 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264251 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264275 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264302 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264324 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264349 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264370 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264641 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264677 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264895 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.265099 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.265126 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.265148 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.265371 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.265385 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.264374 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.265974 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266011 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266022 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266075 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266081 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266109 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266134 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266158 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266184 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266271 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266300 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266326 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266351 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266377 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266403 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266424 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266447 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266467 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266487 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266510 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266531 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266556 4810 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266578 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266600 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266622 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266641 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266657 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266672 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266688 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266705 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266721 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266737 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266752 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266767 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266878 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266898 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266914 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266935 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266953 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266968 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266985 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 00:07:03 crc 
kubenswrapper[4810]: I1009 00:07:03.267000 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267016 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267036 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267055 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267070 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267087 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267104 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267120 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267135 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267150 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267173 4810 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267190 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267209 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267225 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267241 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267258 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267275 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267290 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267307 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267325 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 00:07:03 
crc kubenswrapper[4810]: I1009 00:07:03.267340 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267357 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267372 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267389 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267405 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267421 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267437 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267451 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267469 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267485 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 
00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267501 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267516 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267533 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267549 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267565 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267581 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267599 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267614 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267631 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267646 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod 
\"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267662 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267678 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267694 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267711 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267728 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267743 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267758 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267772 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267787 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267804 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267840 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267861 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267882 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267925 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267947 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267965 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267981 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267998 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268013 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268030 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268047 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268063 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268079 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268095 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268110 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268127 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268142 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268159 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268176 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268192 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268213 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268230 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268255 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268270 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268286 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268304 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268323 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268339 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268356 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268371 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" 
(UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268387 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268404 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268419 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268435 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268453 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268471 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268486 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268502 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268531 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268548 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268565 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268582 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268599 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268616 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268631 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269911 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269953 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269973 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269990 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270031 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270055 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270078 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270105 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270142 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270168 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270194 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270219 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270245 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270272 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 00:07:03 crc 
kubenswrapper[4810]: I1009 00:07:03.270297 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270323 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270351 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270375 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270399 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270423 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270460 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270488 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270561 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270603 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: 
\"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270627 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-ovn-kubernetes\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270646 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-env-overrides\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270664 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-slash\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270679 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-daemon-config\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270699 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270720 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270747 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270772 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-systemd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc 
kubenswrapper[4810]: I1009 00:07:03.270802 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270899 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-kubelet\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270928 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e7b43917-aa65-43dc-b71b-7de0af71d3f6-proxy-tls\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270962 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-multus-certs\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271413 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-etc-kubernetes\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271956 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-system-cni-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271986 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-cnibin\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272012 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c3f6a1ff-10fd-446e-9790-f13f432d1f50-hosts-file\") pod \"node-resolver-bw4pj\" (UID: \"c3f6a1ff-10fd-446e-9790-f13f432d1f50\") " pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272036 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-cnibin\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " 
pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272074 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cbsf\" (UniqueName: \"kubernetes.io/projected/12a20549-7332-4c4f-b63a-38afc78107e3-kube-api-access-9cbsf\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272097 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/12a20549-7332-4c4f-b63a-38afc78107e3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272117 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-ovn\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272137 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-script-lib\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272163 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6229\" (UniqueName: \"kubernetes.io/projected/e7b43917-aa65-43dc-b71b-7de0af71d3f6-kube-api-access-z6229\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272195 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272241 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272267 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-system-cni-dir\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272292 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-os-release\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272317 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-netd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272347 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-etc-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272365 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272387 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272406 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272432 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-var-lib-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272450 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-node-log\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272466 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e7b43917-aa65-43dc-b71b-7de0af71d3f6-rootfs\") pod 
\"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272482 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7b43917-aa65-43dc-b71b-7de0af71d3f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272498 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-cni-binary-copy\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272515 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272531 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-netns\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272548 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69qqx\" (UniqueName: \"kubernetes.io/projected/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-kube-api-access-69qqx\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272565 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-netns\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272580 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-kubelet\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272596 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-conf-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272611 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-cni-bin\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272630 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxjpz\" (UniqueName: \"kubernetes.io/projected/c3f6a1ff-10fd-446e-9790-f13f432d1f50-kube-api-access-fxjpz\") pod \"node-resolver-bw4pj\" (UID: \"c3f6a1ff-10fd-446e-9790-f13f432d1f50\") " pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272657 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-config\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272672 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272704 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272720 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-log-socket\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272735 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-k8s-cni-cncf-io\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272753 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-cni-multus\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272775 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272792 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/12a20549-7332-4c4f-b63a-38afc78107e3-cni-binary-copy\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272812 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-os-release\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272858 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-socket-dir-parent\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272884 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272911 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-systemd-units\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272928 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovn-node-metrics-cert\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272945 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqsrd\" (UniqueName: \"kubernetes.io/projected/bfb7a412-4af9-4aa0-a3e8-d46dab040385-kube-api-access-tqsrd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272960 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-cni-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272995 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-hostroot\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273011 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273030 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273047 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-bin\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273115 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273127 4810 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273139 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273150 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273161 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273172 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273184 4810 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273196 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273206 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273218 4810 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273269 4810 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273288 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266108 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266312 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.266492 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267331 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267642 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267699 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267943 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267936 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.267968 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268180 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268324 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268649 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268647 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268734 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268754 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). 
InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.268317 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269306 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.275654 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269342 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269521 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269624 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.269790 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270134 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270155 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270187 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270262 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.270272 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271093 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271158 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271591 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271613 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271848 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.271812 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272001 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272025 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272442 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272406 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272536 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272413 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.272757 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273133 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273150 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273163 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273227 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273486 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273509 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273620 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273705 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273906 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273888 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273922 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273966 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.273986 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.274207 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.274212 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.274391 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.274418 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.274550 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.275065 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.275293 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.275411 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.275984 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.275553 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.276030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.276122 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.276226 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.276337 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.276376 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.276623 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.276778 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:07:03.776750774 +0000 UTC m=+21.302389575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.277126 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.277180 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.277346 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.277369 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.277574 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.277796 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278092 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278075 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278174 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278298 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278305 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278394 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278428 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278408 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278492 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278571 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278608 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278764 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278788 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278862 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278889 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278923 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278934 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.278995 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.279159 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.279258 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). 
InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.279358 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.285019 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.285392 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.286445 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.286588 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.287063 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:03.787034139 +0000 UTC m=+21.312672870 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289211 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289243 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289254 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289543 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.289643 4810 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.292470 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.292531 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:03.792515207 +0000 UTC m=+21.318153908 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.293126 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.293276 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.293354 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.293485 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:03.793466214 +0000 UTC m=+21.319104925 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.296254 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.296564 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.296669 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.296903 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.297026 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.297049 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.297050 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.297511 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.297683 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.299723 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.300350 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.301099 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.301342 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.301368 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.301385 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.301433 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:03.801416872 +0000 UTC m=+21.327055563 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.301874 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.303974 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.304459 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.304564 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.305456 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.306358 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.306959 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.307083 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.307129 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.307504 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.308258 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.308959 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.309146 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.309723 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.309973 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.310228 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.310254 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.310297 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.310348 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.311285 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.311607 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.311813 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.311895 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.312024 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.312074 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.313201 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.313349 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.313445 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.313602 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.313767 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.314123 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.314268 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.314353 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.314517 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.314531 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.316716 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.317258 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.317342 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.317506 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.317588 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318090 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318117 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318120 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318166 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318184 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318351 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318501 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318601 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318668 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318764 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.318997 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.320247 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.320441 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.320561 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.320656 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.321514 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.321639 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.321829 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.321866 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.321869 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.322138 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.322483 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.323909 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.323940 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.323959 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.324024 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.324116 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.324167 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.323725 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.324383 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.324499 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.325897 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.328419 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.328924 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.329404 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.330876 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.345708 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32f
a41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"lo
g-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip
\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.346656 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.354975 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.355993 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.358700 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.367073 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.369246 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.374565 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-netd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.374756 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-etc-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.374891 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.374977 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375045 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-netns\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375113 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-var-lib-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375183 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-node-log\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e7b43917-aa65-43dc-b71b-7de0af71d3f6-rootfs\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375326 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7b43917-aa65-43dc-b71b-7de0af71d3f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-6752w\" (UID: 
\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375412 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-cni-binary-copy\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375482 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375562 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-conf-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375639 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69qqx\" (UniqueName: \"kubernetes.io/projected/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-kube-api-access-69qqx\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375706 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-netns\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375769 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-kubelet\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375852 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-config\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375956 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-cni-bin\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376022 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxjpz\" (UniqueName: \"kubernetes.io/projected/c3f6a1ff-10fd-446e-9790-f13f432d1f50-kube-api-access-fxjpz\") pod \"node-resolver-bw4pj\" (UID: \"c3f6a1ff-10fd-446e-9790-f13f432d1f50\") " pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 
00:07:03.376083 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/12a20549-7332-4c4f-b63a-38afc78107e3-cni-binary-copy\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376145 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376214 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-var-lib-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376217 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-log-socket\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376297 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-k8s-cni-cncf-io\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376319 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-cni-multus\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376360 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-systemd-units\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376383 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-os-release\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376404 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-socket-dir-parent\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376441 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-cni-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376462 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovn-node-metrics-cert\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376508 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqsrd\" (UniqueName: \"kubernetes.io/projected/bfb7a412-4af9-4aa0-a3e8-d46dab040385-kube-api-access-tqsrd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376527 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-bin\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376542 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-hostroot\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376559 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376587 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-env-overrides\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376605 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-ovn-kubernetes\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376612 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7b43917-aa65-43dc-b71b-7de0af71d3f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376624 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-slash\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376643 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-daemon-config\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376671 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-systemd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376672 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-node-log\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376708 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/e7b43917-aa65-43dc-b71b-7de0af71d3f6-rootfs\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.374685 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-netd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376629 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376746 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-netns\") pod 
\"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376747 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-kubelet\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376782 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-k8s-cni-cncf-io\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376804 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-cni-multus\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376912 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-systemd-units\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376946 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377351 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-hostroot\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377464 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-log-socket\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-os-release\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377618 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-ovn-kubernetes\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377641 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-slash\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377754 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-socket-dir-parent\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378013 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-cni-binary-copy\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375739 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.375705 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-etc-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378171 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-cni-bin\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378177 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-var-lib-kubelet\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376721 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-kubelet\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378200 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-openvswitch\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378306 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/e7b43917-aa65-43dc-b71b-7de0af71d3f6-proxy-tls\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378317 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-cni-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-multus-certs\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378839 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-etc-kubernetes\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378875 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cbsf\" (UniqueName: \"kubernetes.io/projected/12a20549-7332-4c4f-b63a-38afc78107e3-kube-api-access-9cbsf\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378898 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-system-cni-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378918 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-cnibin\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378939 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c3f6a1ff-10fd-446e-9790-f13f432d1f50-hosts-file\") pod \"node-resolver-bw4pj\" (UID: \"c3f6a1ff-10fd-446e-9790-f13f432d1f50\") " pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378961 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-cnibin\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378981 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-os-release\") pod \"multus-additional-cni-plugins-lzf84\" (UID: 
\"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378995 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-etc-kubernetes\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378749 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/12a20549-7332-4c4f-b63a-38afc78107e3-cni-binary-copy\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379003 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/12a20549-7332-4c4f-b63a-38afc78107e3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379053 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-ovn\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379068 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-script-lib\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379084 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6229\" (UniqueName: \"kubernetes.io/projected/e7b43917-aa65-43dc-b71b-7de0af71d3f6-kube-api-access-z6229\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379108 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-system-cni-dir\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379175 4810 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379186 4810 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379196 4810 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379205 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379214 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379222 4810 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379231 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379239 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379247 4810 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379257 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379266 4810 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379275 4810 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379285 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379294 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379304 4810 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379314 4810 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379322 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379331 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379339 4810 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379347 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379355 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379364 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379372 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379380 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379388 4810 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379396 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379404 4810 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379412 4810 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379420 4810 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379428 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379437 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379445 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378500 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379453 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379498 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379514 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379526 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379538 4810 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379550 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379562 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379574 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379587 4810 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379599 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379614 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379627 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379638 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379646 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379654 4810 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379663 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379673 4810 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379681 4810 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379690 4810 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379699 4810 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379708 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379719 4810 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379739 4810 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379753 4810 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379765 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379776 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379790 4810 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379802 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379814 4810 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379848 4810 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379855 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c3f6a1ff-10fd-446e-9790-f13f432d1f50-hosts-file\") pod \"node-resolver-bw4pj\" (UID: \"c3f6a1ff-10fd-446e-9790-f13f432d1f50\") " pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379474 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-system-cni-dir\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379860 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378518 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-conf-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379883 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379896 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379900 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-system-cni-dir\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378543 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-bin\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378441 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-netns\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379907 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.376163 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378720 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-systemd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379947 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379957 4810 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379966 4810 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379974 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379977 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-cnibin\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379939 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-cnibin\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.379982 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380005 4810 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380018 4810 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380020 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/12a20549-7332-4c4f-b63a-38afc78107e3-os-release\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.378742 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-host-run-multus-certs\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380030 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380043 4810 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380048 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-ovn\") pod 
\"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380056 4810 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380069 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380082 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380094 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.377597 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-env-overrides\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380406 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-script-lib\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380419 4810 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380440 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380455 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380467 4810 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380478 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380490 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380501 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380513 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380525 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380537 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380549 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380561 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380574 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380586 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.380597 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381081 4810 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381095 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381107 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381117 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381128 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381140 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381152 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381162 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381172 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381184 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381198 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381208 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381219 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381229 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381246 4810 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381256 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381267 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381277 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381288 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381299 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381309 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381319 4810 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381330 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381340 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381351 4810 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381362 4810 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381372 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381383 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381393 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381404 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381414 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381424 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381435 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381445 4810 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381456 4810 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381467 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381478 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381488 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381498 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381509 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381519 4810 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381529 4810 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381540 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381551 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381564 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381574 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381585 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381598 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381609 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381620 4810 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381631 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381641 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381652 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381662 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381672 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381683 4810 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381694 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381705 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381716 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381726 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381735 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381743 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381752 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381763 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381774 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381784 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381794 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381804 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381814 4810 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381843 4810 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381853 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381864 4810 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381876 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381886 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381896 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381906 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381917 4810 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381928 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381938 4810 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381950 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381960 4810 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381971 4810 
reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381982 4810 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.381992 4810 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.382002 4810 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.382012 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.382024 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.382201 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-multus-daemon-config\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.382377 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/12a20549-7332-4c4f-b63a-38afc78107e3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.382725 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-config\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.384330 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.385065 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovn-node-metrics-cert\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.385159 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e7b43917-aa65-43dc-b71b-7de0af71d3f6-proxy-tls\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.385591 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.393166 4810 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.393624 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqsrd\" (UniqueName: \"kubernetes.io/projected/bfb7a412-4af9-4aa0-a3e8-d46dab040385-kube-api-access-tqsrd\") pod \"ovnkube-node-sd2lj\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.394344 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.395135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.395249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.395323 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.395381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.395438 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.397518 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxjpz\" (UniqueName: \"kubernetes.io/projected/c3f6a1ff-10fd-446e-9790-f13f432d1f50-kube-api-access-fxjpz\") pod \"node-resolver-bw4pj\" (UID: \"c3f6a1ff-10fd-446e-9790-f13f432d1f50\") " pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.399630 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69qqx\" (UniqueName: \"kubernetes.io/projected/8e9fffc1-16a6-4108-978b-6e85bdfd9c4f-kube-api-access-69qqx\") pod \"multus-vrlxd\" (UID: \"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\") " pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.401716 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6229\" (UniqueName: \"kubernetes.io/projected/e7b43917-aa65-43dc-b71b-7de0af71d3f6-kube-api-access-z6229\") pod \"machine-config-daemon-6752w\" (UID: \"e7b43917-aa65-43dc-b71b-7de0af71d3f6\") " pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.403918 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cbsf\" (UniqueName: \"kubernetes.io/projected/12a20549-7332-4c4f-b63a-38afc78107e3-kube-api-access-9cbsf\") pod \"multus-additional-cni-plugins-lzf84\" (UID: \"12a20549-7332-4c4f-b63a-38afc78107e3\") " pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.404048 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.412622 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.420193 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.427135 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.436036 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.444722 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.454185 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.461768 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.482614 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.482422 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.498696 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-bw4pj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.499937 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.499974 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.499987 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.500004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.500016 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.508250 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.514384 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.521649 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.528135 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3f6a1ff_10fd_446e_9790_f13f432d1f50.slice/crio-0c8e94180264eba306d41ea4d950e87fcf02784d04244330a29cf60bc8701eaa WatchSource:0}: Error finding container 0c8e94180264eba306d41ea4d950e87fcf02784d04244330a29cf60bc8701eaa: Status 404 returned error can't find the container with id 0c8e94180264eba306d41ea4d950e87fcf02784d04244330a29cf60bc8701eaa Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.529119 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-lzf84" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.536760 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.542438 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.547078 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-2bc87c87bdc45d90f23ceff48b02d354c25123f0dc4989b370b5b319e288f131 WatchSource:0}: Error finding container 2bc87c87bdc45d90f23ceff48b02d354c25123f0dc4989b370b5b319e288f131: Status 404 returned error can't find the container with id 2bc87c87bdc45d90f23ceff48b02d354c25123f0dc4989b370b5b319e288f131 Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.547214 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-vrlxd" Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.554027 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-667f2c06d563ebff80d9feae517d4adbe744f2d553a07ebe0820c41011fbb5c4 WatchSource:0}: Error finding container 667f2c06d563ebff80d9feae517d4adbe744f2d553a07ebe0820c41011fbb5c4: Status 404 returned error can't find the container with id 667f2c06d563ebff80d9feae517d4adbe744f2d553a07ebe0820c41011fbb5c4 Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.558182 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12a20549_7332_4c4f_b63a_38afc78107e3.slice/crio-9672dcf62fc8affc9c992481fc5895e6676f641178580648f4e90c9ab4d22ad7 WatchSource:0}: Error finding container 9672dcf62fc8affc9c992481fc5895e6676f641178580648f4e90c9ab4d22ad7: Status 404 returned error can't find the container with id 9672dcf62fc8affc9c992481fc5895e6676f641178580648f4e90c9ab4d22ad7 Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.578279 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfb7a412_4af9_4aa0_a3e8_d46dab040385.slice/crio-2386c291419156c2556618dbce80db50a225cf2f8457f5540fbc4a86fa87eaff WatchSource:0}: Error finding container 2386c291419156c2556618dbce80db50a225cf2f8457f5540fbc4a86fa87eaff: Status 404 returned error can't find the container with id 2386c291419156c2556618dbce80db50a225cf2f8457f5540fbc4a86fa87eaff Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.594323 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7b43917_aa65_43dc_b71b_7de0af71d3f6.slice/crio-62de2f1f618c21f1dd150675e601ff561e70bad25fffaa7760bbd1f5d7cec2b9 WatchSource:0}: Error finding container 62de2f1f618c21f1dd150675e601ff561e70bad25fffaa7760bbd1f5d7cec2b9: Status 404 returned error can't find the container with id 62de2f1f618c21f1dd150675e601ff561e70bad25fffaa7760bbd1f5d7cec2b9 Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.605239 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.605309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.605335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.605363 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.605382 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:03 crc kubenswrapper[4810]: W1009 00:07:03.621421 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e9fffc1_16a6_4108_978b_6e85bdfd9c4f.slice/crio-b7f92dd0bc56093f857981b9accab73c5d20136a9916acf0a72ee9a236f81d87 WatchSource:0}: Error finding container b7f92dd0bc56093f857981b9accab73c5d20136a9916acf0a72ee9a236f81d87: Status 404 returned error can't find the container with id b7f92dd0bc56093f857981b9accab73c5d20136a9916acf0a72ee9a236f81d87 Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.708212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.708240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.708248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.708261 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.708270 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.785998 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.786101 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:07:04.786076802 +0000 UTC m=+22.311715503 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.810620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.810654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.810663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.810675 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.810685 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.887446 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.887487 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.887512 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.887535 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887636 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887656 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887668 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887707 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:04.887693048 +0000 UTC m=+22.413331749 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887762 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887783 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:04.88777759 +0000 UTC m=+22.413416291 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887845 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887856 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887865 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887887 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:04.887881323 +0000 UTC m=+22.413520024 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887916 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: E1009 00:07:03.887935 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:04.887930005 +0000 UTC m=+22.413568706 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.915228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.915266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.915275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.915290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:03 crc kubenswrapper[4810]: I1009 00:07:03.915299 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:03Z","lastTransitionTime":"2025-10-09T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.018074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.018115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.018154 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.018169 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.018178 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.120469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.120514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.120525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.120541 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.120554 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.223221 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.223264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.223278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.223294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.223303 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.325469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.325535 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.325546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.325563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.325576 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.393116 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerStarted","Data":"f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.393178 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerStarted","Data":"b7f92dd0bc56093f857981b9accab73c5d20136a9916acf0a72ee9a236f81d87"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.394716 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e463fe61d8190317a8241401b4bd54f507caa7c6be1ebe45a82f85d083dbf170"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.396057 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-bw4pj" event={"ID":"c3f6a1ff-10fd-446e-9790-f13f432d1f50","Type":"ContainerStarted","Data":"799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.396096 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-bw4pj" event={"ID":"c3f6a1ff-10fd-446e-9790-f13f432d1f50","Type":"ContainerStarted","Data":"0c8e94180264eba306d41ea4d950e87fcf02784d04244330a29cf60bc8701eaa"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.397789 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.397914 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.397926 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"667f2c06d563ebff80d9feae517d4adbe744f2d553a07ebe0820c41011fbb5c4"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.399167 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.399589 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.400969 4810 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c" exitCode=255 Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.401027 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.401057 4810 scope.go:117] "RemoveContainer" containerID="a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.402610 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" exitCode=0 Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.402670 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.402708 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"2386c291419156c2556618dbce80db50a225cf2f8457f5540fbc4a86fa87eaff"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.404625 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.404654 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2bc87c87bdc45d90f23ceff48b02d354c25123f0dc4989b370b5b319e288f131"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.406456 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.406503 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.406518 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"62de2f1f618c21f1dd150675e601ff561e70bad25fffaa7760bbd1f5d7cec2b9"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.407763 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.407903 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerDied","Data":"dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.407881 4810 generic.go:334] "Generic (PLEG): container finished" podID="12a20549-7332-4c4f-b63a-38afc78107e3" containerID="dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de" exitCode=0 Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.408039 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerStarted","Data":"9672dcf62fc8affc9c992481fc5895e6676f641178580648f4e90c9ab4d22ad7"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.414451 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.415029 4810 scope.go:117] "RemoveContainer" containerID="e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c" Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.415234 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s 
restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.427719 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.429096 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.429131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.429144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.429162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.429177 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.443458 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.458494 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.475232 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.487652 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.489613 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.501555 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90
d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.516083 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.532338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.532390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.532404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.532426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.532442 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.538140 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.551083 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"}
,{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.565007 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.580914 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.596300 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:06:57Z\\\",\\\"message\\\":\\\"W1009 00:06:46.421409 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1009 00:06:46.421705 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759968406 cert, and key in /tmp/serving-cert-1267896422/serving-signer.crt, /tmp/serving-cert-1267896422/serving-signer.key\\\\nI1009 00:06:46.769661 1 observer_polling.go:159] Starting file observer\\\\nW1009 00:06:46.772000 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1009 00:06:46.772278 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:06:46.775690 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1267896422/tls.crt::/tmp/serving-cert-1267896422/tls.key\\\\\\\"\\\\nF1009 00:06:57.044791 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating 
requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.610073 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.620447 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.631838 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.634440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.634473 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.634482 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.634497 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.634507 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.643497 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.655730 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.666274 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.676982 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.691004 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.706462 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.723115 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z 
is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.734636 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.736313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.736345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.736356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.736372 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.736383 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.751703 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc 
kubenswrapper[4810]: I1009 00:07:04.789815 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-5qcmj"] Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.790230 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.792238 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.792355 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.792604 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.793147 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.795641 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.795755 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:07:06.795734477 +0000 UTC m=+24.321373208 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.802887 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.815384 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.830395 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.838757 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.838793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.838802 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.838834 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.838844 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.854160 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7
e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.869741 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.882074 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.894053 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896528 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896570 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896593 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-246vj\" (UniqueName: \"kubernetes.io/projected/c70efaa9-20ad-45fe-af4f-a068e313dad3-kube-api-access-246vj\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896612 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896633 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c70efaa9-20ad-45fe-af4f-a068e313dad3-serviceca\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896652 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.896668 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c70efaa9-20ad-45fe-af4f-a068e313dad3-host\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896785 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896800 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896811 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896861 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:06.896848748 +0000 UTC m=+24.422487449 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896908 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896927 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:06.89692145 +0000 UTC m=+24.422560151 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896966 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896974 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896981 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.896999 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:06.896992523 +0000 UTC m=+24.422631224 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.897089 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: E1009 00:07:04.897171 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:06.897151157 +0000 UTC m=+24.422789928 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.910104 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc 
kubenswrapper[4810]: I1009 00:07:04.920505 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.930747 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.941914 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.941952 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.941963 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.941979 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.941992 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:04Z","lastTransitionTime":"2025-10-09T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.942046 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.951845 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.966186 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:06:57Z\\\",\\\"message\\\":\\\"W1009 00:06:46.421409 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1009 
00:06:46.421705 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759968406 cert, and key in /tmp/serving-cert-1267896422/serving-signer.crt, /tmp/serving-cert-1267896422/serving-signer.key\\\\nI1009 00:06:46.769661 1 observer_polling.go:159] Starting file observer\\\\nW1009 00:06:46.772000 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1009 00:06:46.772278 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:06:46.775690 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1267896422/tls.crt::/tmp/serving-cert-1267896422/tls.key\\\\\\\"\\\\nF1009 00:06:57.044791 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.978745 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.998258 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-246vj\" (UniqueName: \"kubernetes.io/projected/c70efaa9-20ad-45fe-af4f-a068e313dad3-kube-api-access-246vj\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.998302 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c70efaa9-20ad-45fe-af4f-a068e313dad3-serviceca\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.998320 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c70efaa9-20ad-45fe-af4f-a068e313dad3-host\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:04 crc kubenswrapper[4810]: I1009 00:07:04.998376 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c70efaa9-20ad-45fe-af4f-a068e313dad3-host\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.000517 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c70efaa9-20ad-45fe-af4f-a068e313dad3-serviceca\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.014942 4810 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-246vj\" (UniqueName: \"kubernetes.io/projected/c70efaa9-20ad-45fe-af4f-a068e313dad3-kube-api-access-246vj\") pod \"node-ca-5qcmj\" (UID: \"c70efaa9-20ad-45fe-af4f-a068e313dad3\") " pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.043951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.044002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.044018 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.044039 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.044058 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.146545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.146600 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.146613 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.146637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.146652 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.248525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.248565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.248578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.248595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.248609 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.252709 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.252709 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.252840 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:05 crc kubenswrapper[4810]: E1009 00:07:05.252985 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:05 crc kubenswrapper[4810]: E1009 00:07:05.253123 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:05 crc kubenswrapper[4810]: E1009 00:07:05.253232 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.256134 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.256771 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.257888 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.258583 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.259666 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.260336 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.261415 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.262594 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.263393 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.264410 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.265097 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.266434 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.267162 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.267736 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.268673 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.269229 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.270201 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.270616 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.271441 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.272415 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.272924 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.274059 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.274557 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.275812 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.276364 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.277106 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.278496 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.279115 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.280296 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.280952 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.282071 4810 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.282204 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.284495 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.285755 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.286468 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.287948 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-5qcmj" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.288294 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.289100 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.290350 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.291235 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.292598 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.293303 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.294568 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.295450 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.296652 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.297315 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.298608 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.299330 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.300661 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.301307 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.302325 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.302942 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.303633 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.305754 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.306517 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.360654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.360690 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.360701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.360716 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.360727 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.413389 4810 generic.go:334] "Generic (PLEG): container finished" podID="12a20549-7332-4c4f-b63a-38afc78107e3" containerID="a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603" exitCode=0 Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.413479 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerDied","Data":"a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.416384 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5qcmj" event={"ID":"c70efaa9-20ad-45fe-af4f-a068e313dad3","Type":"ContainerStarted","Data":"96bdd2cab9945b3debd84cf0af6b922fc767db441d930801353b3bdf922a9a1d"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.418722 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.421908 4810 scope.go:117] "RemoveContainer" containerID="e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c" Oct 09 00:07:05 crc kubenswrapper[4810]: E1009 00:07:05.422269 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.425914 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.425958 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.425970 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.425978 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.427130 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.440630 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1237974e342056b94c8ad8702c7b1920f3d39af535a1d6ffe94cac2ace16544\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:06:57Z\\\",\\\"message\\\":\\\"W1009 00:06:46.421409 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1009 
00:06:46.421705 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759968406 cert, and key in /tmp/serving-cert-1267896422/serving-signer.crt, /tmp/serving-cert-1267896422/serving-signer.key\\\\nI1009 00:06:46.769661 1 observer_polling.go:159] Starting file observer\\\\nW1009 00:06:46.772000 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1009 00:06:46.772278 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:06:46.775690 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1267896422/tls.crt::/tmp/serving-cert-1267896422/tls.key\\\\\\\"\\\\nF1009 00:06:57.044791 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.456043 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.462866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.462907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.462916 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.462932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.462946 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.478776 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.492250 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.508102 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.518722 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c
162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.531796 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.543999 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.557588 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.566075 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.566108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.566120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.566136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.566148 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.568003 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.585053 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.597991 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.609511 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.621405 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.636137 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.645844 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.658566 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.668250 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.668288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.668300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.668316 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.668327 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.669184 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.688990 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.700846 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.717928 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.725995 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.734578 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 
00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.737118 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.750211 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.770427 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.770458 4810 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.770469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.770482 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.770491 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.785592 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.825534 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.868849 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.872349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc 
kubenswrapper[4810]: I1009 00:07:05.872411 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.872429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.872449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.872462 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.908615 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.949734 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.975224 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.975257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.975265 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.975280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.975290 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:05Z","lastTransitionTime":"2025-10-09T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:05 crc kubenswrapper[4810]: I1009 00:07:05.988797 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:05Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.033010 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.078336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.078414 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.078433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.078458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.078475 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.081015 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7
e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.113373 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.158231 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.181923 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.181989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.182005 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.182029 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.182043 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.190935 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.240712 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.272724 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.284762 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.284851 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.284874 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.284897 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.284913 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.311256 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.356520 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.388514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.388572 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.388591 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.388628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.388648 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.392808 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.436515 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerDied","Data":"764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.436451 4810 generic.go:334] "Generic (PLEG): container finished" podID="12a20549-7332-4c4f-b63a-38afc78107e3" containerID="764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e" exitCode=0 Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.437763 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd79
1fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.439185 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-5qcmj" event={"ID":"c70efaa9-20ad-45fe-af4f-a068e313dad3","Type":"ContainerStarted","Data":"700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.444965 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" 
event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.445040 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.446891 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.447687 4810 scope.go:117] "RemoveContainer" containerID="e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c" Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.447934 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.477804 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d
28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.490537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.490570 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.490581 
4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.490597 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.490609 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.512524 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.548507 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.591247 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-
release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\
",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.592500 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.592559 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.592587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.592619 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.592642 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.630807 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.669015 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.695295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.695373 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.695398 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.695432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.695459 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.719790 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.749273 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.786894 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.797863 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.798028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.798121 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.798204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.798268 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.818576 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.818770 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:07:10.818738305 +0000 UTC m=+28.344377016 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.829175 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.868258 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.900213 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.900241 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.900249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.900273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.900283 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:06Z","lastTransitionTime":"2025-10-09T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.909602 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.919278 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.919309 
4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.919332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.919358 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919428 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919432 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919441 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919455 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919428 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919431 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919492 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919498 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919483 4810 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:10.919468105 +0000 UTC m=+28.445106806 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919522 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:10.919514637 +0000 UTC m=+28.445153338 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919533 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:10.919528157 +0000 UTC m=+28.445166858 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:06 crc kubenswrapper[4810]: E1009 00:07:06.919542 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:10.919538178 +0000 UTC m=+28.445176879 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.951870 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:06 crc kubenswrapper[4810]: I1009 00:07:06.988302 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:06Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.002085 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.002120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.002134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.002149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.002162 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.031843 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.070194 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.104165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.104516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.104706 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.104945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.105176 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.125411 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7
e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.148953 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.208329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.208378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.208389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.208408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.208420 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.252975 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.253027 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:07 crc kubenswrapper[4810]: E1009 00:07:07.253135 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.252996 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:07 crc kubenswrapper[4810]: E1009 00:07:07.253316 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:07 crc kubenswrapper[4810]: E1009 00:07:07.253478 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.311200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.311252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.311270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.311294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.311311 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.414787 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.414873 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.414891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.414915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.414933 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.454915 4810 generic.go:334] "Generic (PLEG): container finished" podID="12a20549-7332-4c4f-b63a-38afc78107e3" containerID="60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa" exitCode=0 Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.455051 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerDied","Data":"60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.485613 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.507180 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\"
 for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.518932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.518992 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.519011 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.519051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.519069 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.530276 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.548097 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.562465 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.579295 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.594648 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.620301 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\
"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.621521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.621576 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.621591 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.621616 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.621629 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.637681 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.649959 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.671588 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.715444 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z 
is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.724207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.724240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.724248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.724264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.724273 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.733385 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.744456 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.754809 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:07Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.826894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.826932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.826941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.826956 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.826966 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.929100 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.929136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.929147 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.929186 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:07 crc kubenswrapper[4810]: I1009 00:07:07.929200 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:07Z","lastTransitionTime":"2025-10-09T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.032011 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.032051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.032069 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.032088 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.032099 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.135701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.135784 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.135809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.135885 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.135909 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.238858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.239250 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.239507 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.239722 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.239968 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.343081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.343148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.343172 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.343203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.343227 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.446380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.446433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.446451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.446474 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.446490 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.464738 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.469400 4810 generic.go:334] "Generic (PLEG): container finished" podID="12a20549-7332-4c4f-b63a-38afc78107e3" containerID="860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69" exitCode=0 Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.469457 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerDied","Data":"860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.491565 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.525448 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.548884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.548941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.548961 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.548986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.549005 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.550998 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.565483 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.585702 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.604220 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.619483 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c
162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.637562 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d0535
8bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.651439 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.652270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.652367 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.652426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.652459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.652484 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.670445 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.686131 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.700055 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.719165 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z 
is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.734986 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.751757 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:08Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.754788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.754852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.754869 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.754888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.754903 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.858289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.858345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.858363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.858387 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.858406 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.961218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.961270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.961287 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.961311 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:08 crc kubenswrapper[4810]: I1009 00:07:08.961329 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:08Z","lastTransitionTime":"2025-10-09T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.064979 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.065055 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.065070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.065096 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.065112 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.168064 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.168108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.168122 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.168141 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.168153 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.253109 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.253259 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:09 crc kubenswrapper[4810]: E1009 00:07:09.253345 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:09 crc kubenswrapper[4810]: E1009 00:07:09.253469 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.253531 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:09 crc kubenswrapper[4810]: E1009 00:07:09.253631 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.270254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.270287 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.270299 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.270315 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.270327 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.373566 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.373635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.373654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.373681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.373700 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.476902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.476961 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.476983 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.477010 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.477032 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.485259 4810 generic.go:334] "Generic (PLEG): container finished" podID="12a20549-7332-4c4f-b63a-38afc78107e3" containerID="ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9" exitCode=0 Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.485321 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerDied","Data":"ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.508955 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.547239 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.564200 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.578896 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.580897 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.580942 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.580958 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.580975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.580987 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.592539 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.604148 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.622619 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z 
is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.633433 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.648475 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.664720 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1
ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.677404 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.682956 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.682998 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.683012 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.683029 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.683042 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.687073 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.697856 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.707722 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.721239 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:09Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.785395 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.785447 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.785463 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.785487 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.785504 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.887856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.887890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.887900 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.887913 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.887927 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.991231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.991293 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.991313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.991340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:09 crc kubenswrapper[4810]: I1009 00:07:09.991360 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:09Z","lastTransitionTime":"2025-10-09T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.094383 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.094472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.094545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.094703 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.094737 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.197851 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.197917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.197934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.197958 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.197975 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.301228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.301279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.301298 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.301320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.301335 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.403272 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.403323 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.403340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.403361 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.403373 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.495710 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.496064 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.502797 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" event={"ID":"12a20549-7332-4c4f-b63a-38afc78107e3","Type":"ContainerStarted","Data":"ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.505249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.505286 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.505303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.505324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.505341 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.521155 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.538201 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.553267 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.558274 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc 
kubenswrapper[4810]: I1009 00:07:10.579005 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.600276 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.608044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.608098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.608127 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.608152 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.608171 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.618970 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117e
e1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.650521 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.666076 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.676526 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.690475 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.705855 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.710581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.710621 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.710634 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.710652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.710664 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.728916 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.745352 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.760381 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.778328 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.798554 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.813201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.813259 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc 
kubenswrapper[4810]: I1009 00:07:10.813278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.813302 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.813319 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.817523 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.832847 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.842759 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.857214 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.867790 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.872182 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.872346 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:07:18.872324988 +0000 UTC m=+36.397963689 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.881736 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.895467 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.916160 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.916189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.916197 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.916236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.916249 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:10Z","lastTransitionTime":"2025-10-09T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.916579 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.934812 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.949522 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.964270 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.973467 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.973526 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.973559 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.973592 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973645 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973682 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973697 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973699 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973748 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:18.973732858 +0000 UTC m=+36.499371559 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973769 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:18.973760019 +0000 UTC m=+36.499398720 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973784 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973851 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:18.973806761 +0000 UTC m=+36.499445482 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973862 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973884 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973897 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:10 crc kubenswrapper[4810]: E1009 00:07:10.973928 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:18.973919564 +0000 UTC m=+36.499558355 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.975042 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:10 crc kubenswrapper[4810]: I1009 00:07:10.992389 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:10Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.005031 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.018928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.018962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.018973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.019009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.019026 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.122762 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.122815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.122867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.122890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.122916 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.225576 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.225653 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.225681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.225711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.225733 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.253423 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.253427 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.253638 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:11 crc kubenswrapper[4810]: E1009 00:07:11.253539 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:11 crc kubenswrapper[4810]: E1009 00:07:11.253699 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:11 crc kubenswrapper[4810]: E1009 00:07:11.253881 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.329804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.329891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.329909 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.329934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.329952 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.433035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.433105 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.433127 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.433156 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.433177 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.511149 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.511202 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.537996 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.538050 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.538068 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.538090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.538108 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.546914 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.565758 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.597942 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.618586 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.635451 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.641588 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.641635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.641647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.641666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.641680 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.655001 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.668300 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.690177 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec
5639a091dcb249fb4e092caa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.707570 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.728453 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.742631 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.744781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.744839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.744850 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.744869 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.744881 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.755354 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.763306 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.774369 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.785189 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.797528 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:11Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.847591 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.847630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.847641 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.847657 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.847667 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.951154 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.951231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.951251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.951275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:11 crc kubenswrapper[4810]: I1009 00:07:11.951292 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:11Z","lastTransitionTime":"2025-10-09T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.053867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.053916 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.053929 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.053947 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.053961 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.157457 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.157512 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.157529 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.157551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.157568 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.259926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.259984 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.260090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.260113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.260125 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.362116 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.362146 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.362155 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.362168 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.362178 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.463712 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.463747 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.463756 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.463770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.463778 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.565933 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.565988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.566005 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.566028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.566046 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.669361 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.669406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.669423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.669464 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.669480 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.738695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.738731 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.738741 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.738771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.738785 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: E1009 00:07:12.759218 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:12Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.763204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.763245 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.763257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.763273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.763283 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: E1009 00:07:12.775696 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:12Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.779382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.779407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.779416 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.779429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.779438 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: E1009 00:07:12.791811 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:12Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.795015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.795043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.795052 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.795066 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.795074 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: E1009 00:07:12.805281 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:12Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.808247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.808270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.808278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.808290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.808299 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: E1009 00:07:12.818426 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:12Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:12 crc kubenswrapper[4810]: E1009 00:07:12.818542 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.820262 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.820286 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.820294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.820306 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.820314 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.922589 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.922622 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.922630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.922643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:12 crc kubenswrapper[4810]: I1009 00:07:12.922652 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:12Z","lastTransitionTime":"2025-10-09T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.025952 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.026011 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.026027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.026052 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.026075 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.129026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.129064 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.129076 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.129091 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.129105 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.231976 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.232033 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.232045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.232063 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.232078 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.253357 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.253477 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.253705 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:13 crc kubenswrapper[4810]: E1009 00:07:13.253701 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:13 crc kubenswrapper[4810]: E1009 00:07:13.253807 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:13 crc kubenswrapper[4810]: E1009 00:07:13.254005 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.272299 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.290282 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.322031 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec
5639a091dcb249fb4e092caa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.334971 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.335013 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.335029 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.335051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.335068 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.349889 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.372237 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.390088 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d74
62\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.404656 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.416023 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.431616 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.437425 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.437487 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.437513 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.437548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.437575 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.444593 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.458783 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.473182 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.492952 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.509468 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.519545 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/0.log" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.523082 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa" exitCode=1 Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.523149 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.524373 4810 scope.go:117] "RemoveContainer" containerID="7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.525110 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.540215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.540245 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.540254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.540271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.540283 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.542722 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.563939 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.579520 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.593323 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.612345 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.625748 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.643362 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.643410 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.643507 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.643973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.644025 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.644891 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"olicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1009 00:07:12.842755 6163 factory.go:656] Stopping watch factory\\\\nI1009 00:07:12.842791 6163 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1009 00:07:12.842850 6163 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.842889 6163 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 00:07:12.842990 6163 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843028 6163 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843325 6163 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843346 6163 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843456 6163 reflector.go:311] Stopping reflector *v1.Pod (0s) from 
k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.660517 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mou
ntPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.676669 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.692601 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.715612 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.731368 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.746681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.746710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.746719 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.746733 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.746744 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.746911 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.761736 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.777483 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:13Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.850643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.850678 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.850688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.850705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.850716 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.958339 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.958398 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.958414 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.958435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:13 crc kubenswrapper[4810]: I1009 00:07:13.958455 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:13Z","lastTransitionTime":"2025-10-09T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.075213 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.075284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.075308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.075338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.075362 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.178703 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.178750 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.178764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.178784 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.178799 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.280708 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.280756 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.280772 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.280794 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.280809 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.382964 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.383006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.383019 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.383038 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.383053 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.486092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.486144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.486162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.486185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.486206 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.529779 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/1.log" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.530855 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/0.log" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.535601 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034" exitCode=1 Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.535670 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.535788 4810 scope.go:117] "RemoveContainer" containerID="7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.536960 4810 scope.go:117] "RemoveContainer" containerID="220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034" Oct 09 00:07:14 crc kubenswrapper[4810]: E1009 00:07:14.537381 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.563009 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.581215 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.588439 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.588599 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.588684 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.588795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.588902 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.600733 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.618612 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.639118 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.656664 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.678125 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.692007 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.692057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.692081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.692112 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.692134 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.710574 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.727213 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.739629 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.753572 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.777288 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7ef
c1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7fa58d9cf660fe64d09bac793f36f67edda213ec5639a091dcb249fb4e092caa\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:12Z\\\",\\\"message\\\":\\\"olicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1009 00:07:12.842755 6163 factory.go:656] Stopping watch factory\\\\nI1009 00:07:12.842791 6163 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1009 00:07:12.842850 6163 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.842889 6163 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 00:07:12.842990 6163 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843028 6163 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843325 6163 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843346 6163 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 00:07:12.843456 6163 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs 
for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.793348 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.795252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.795295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.795466 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.795492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.795510 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.806512 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.817288 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.898230 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.898259 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.898268 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.898282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:14 crc kubenswrapper[4810]: I1009 00:07:14.898290 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:14Z","lastTransitionTime":"2025-10-09T00:07:14Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.001151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.001202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.001212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.001229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.001239 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.103741 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.103784 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.103793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.103808 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.103835 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.206784 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.207125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.207304 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.207469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.207646 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.253390 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.253392 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.253538 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:15 crc kubenswrapper[4810]: E1009 00:07:15.253550 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:15 crc kubenswrapper[4810]: E1009 00:07:15.253627 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:15 crc kubenswrapper[4810]: E1009 00:07:15.253684 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.310483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.310515 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.310525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.310540 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.310550 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.412898 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.412949 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.412966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.412990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.413008 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.517350 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.517403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.517421 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.517448 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.517465 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.542694 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/1.log" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.547646 4810 scope.go:117] "RemoveContainer" containerID="220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034" Oct 09 00:07:15 crc kubenswrapper[4810]: E1009 00:07:15.547961 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.570235 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.590197 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.605098 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.620091 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.620151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.620169 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.620200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.620218 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.621617 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.638407 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.655293 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.670345 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.701868 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.719674 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.722487 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.722611 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.722697 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.722783 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.722887 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.735846 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.750471 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.765387 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.786806 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7ef
c1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.802218 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.817900 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:15Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.824777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.824807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc 
kubenswrapper[4810]: I1009 00:07:15.824839 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.824869 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.824887 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.927629 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.927685 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.927704 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.927732 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:15 crc kubenswrapper[4810]: I1009 00:07:15.927751 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:15Z","lastTransitionTime":"2025-10-09T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.030749 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.030876 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.030899 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.030923 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.030940 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.127465 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t"] Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.128585 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.133684 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.134055 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.134257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.134451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.133718 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.134784 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.136901 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.169662 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.182563 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.194172 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.215227 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.227333 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3ee98c8-29c6-4162-beca-d8abe019a814-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.227437 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3ee98c8-29c6-4162-beca-d8abe019a814-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.227522 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3ee98c8-29c6-4162-beca-d8abe019a814-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.227694 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7rxj\" (UniqueName: \"kubernetes.io/projected/b3ee98c8-29c6-4162-beca-d8abe019a814-kube-api-access-n7rxj\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: 
\"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.238308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.238348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.238379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.238399 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.238411 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.246009 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7ef
c1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.260647 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.278154 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.292271 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.304901 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.317099 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.328725 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3ee98c8-29c6-4162-beca-d8abe019a814-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.328786 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7rxj\" (UniqueName: \"kubernetes.io/projected/b3ee98c8-29c6-4162-beca-d8abe019a814-kube-api-access-n7rxj\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.328837 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3ee98c8-29c6-4162-beca-d8abe019a814-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.328875 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3ee98c8-29c6-4162-beca-d8abe019a814-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.329439 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b3ee98c8-29c6-4162-beca-d8abe019a814-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.329538 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b3ee98c8-29c6-4162-beca-d8abe019a814-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.332587 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.338449 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b3ee98c8-29c6-4162-beca-d8abe019a814-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.340331 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.340351 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.340359 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.340373 4810 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.340382 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.343770 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.345137 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7rxj\" (UniqueName: \"kubernetes.io/projected/b3ee98c8-29c6-4162-beca-d8abe019a814-kube-api-access-n7rxj\") pod \"ovnkube-control-plane-749d76644c-xsv9t\" (UID: \"b3ee98c8-29c6-4162-beca-d8abe019a814\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.357283 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.371315 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.383053 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.396729 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:16Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.442534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.442609 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.442620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.442635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.442978 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.450785 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" Oct 09 00:07:16 crc kubenswrapper[4810]: W1009 00:07:16.460763 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb3ee98c8_29c6_4162_beca_d8abe019a814.slice/crio-1f79796a8fce360f0b61c08e82f00b63161015767d599613cd6a076e03ff6903 WatchSource:0}: Error finding container 1f79796a8fce360f0b61c08e82f00b63161015767d599613cd6a076e03ff6903: Status 404 returned error can't find the container with id 1f79796a8fce360f0b61c08e82f00b63161015767d599613cd6a076e03ff6903 Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.545449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.545485 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.545494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.545507 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.545516 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.549961 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" event={"ID":"b3ee98c8-29c6-4162-beca-d8abe019a814","Type":"ContainerStarted","Data":"1f79796a8fce360f0b61c08e82f00b63161015767d599613cd6a076e03ff6903"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.647876 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.647920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.647930 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.647946 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.647957 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.753678 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.753737 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.753755 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.753782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.753802 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.857947 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.858025 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.858048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.858079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.858100 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.960242 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.960283 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.960295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.960314 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:16 crc kubenswrapper[4810]: I1009 00:07:16.960326 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:16Z","lastTransitionTime":"2025-10-09T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.063266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.063340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.063363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.063394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.063417 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.166379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.166437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.166455 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.166501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.166538 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.237629 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-xpz29"] Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.238373 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.238480 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.253597 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.253685 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.253597 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.253813 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.253989 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.254138 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.261397 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.269891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.269948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.269972 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.270002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.270023 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.283433 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.313750 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.334686 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.339228 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t87mn\" (UniqueName: \"kubernetes.io/projected/f74b5f49-e104-4aa7-9472-14d1e706785c-kube-api-access-t87mn\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.339340 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.348700 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.370486 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.373365 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.373436 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc 
kubenswrapper[4810]: I1009 00:07:17.373471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.373502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.373524 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.387698 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.399200 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.414026 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.423306 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.435582 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.442146 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t87mn\" (UniqueName: \"kubernetes.io/projected/f74b5f49-e104-4aa7-9472-14d1e706785c-kube-api-access-t87mn\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.442242 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.442513 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.442574 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:07:17.942551296 +0000 UTC m=+35.468190017 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.451438 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.462403 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t87mn\" (UniqueName: \"kubernetes.io/projected/f74b5f49-e104-4aa7-9472-14d1e706785c-kube-api-access-t87mn\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.463083 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.476283 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.476492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.476520 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.476532 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.476548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.476561 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.494462 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.508955 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.519516 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.554527 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" event={"ID":"b3ee98c8-29c6-4162-beca-d8abe019a814","Type":"ContainerStarted","Data":"6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.554567 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" event={"ID":"b3ee98c8-29c6-4162-beca-d8abe019a814","Type":"ContainerStarted","Data":"4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.566535 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"na
me\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.578739 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.578788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.578801 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.578871 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.578888 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.590373 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.609418 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.622447 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.641794 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.658557 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.683379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.683450 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.683474 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.683509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.683548 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.690040 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.709388 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.724553 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.743174 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.758850 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.774204 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.785710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.785753 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.785765 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.785783 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.785796 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.790649 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.803309 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.818478 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.829354 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.842053 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:17Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.888659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.888727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.888745 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.888770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.888792 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.946571 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.946850 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:17 crc kubenswrapper[4810]: E1009 00:07:17.946958 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:07:18.946929721 +0000 UTC m=+36.472568452 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.992413 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.992486 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.992503 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.992528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:17 crc kubenswrapper[4810]: I1009 00:07:17.992546 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:17Z","lastTransitionTime":"2025-10-09T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.094972 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.095015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.095027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.095046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.095058 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.198196 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.198269 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.198289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.198317 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.198335 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.254353 4810 scope.go:117] "RemoveContainer" containerID="e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.306392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.306451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.306471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.306502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.306525 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.408738 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.408811 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.408868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.408899 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.408924 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.512618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.512668 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.512683 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.512708 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.512726 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.558779 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.561259 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.561532 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.576932 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.604743 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.615368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.615422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.615439 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.615465 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.615487 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.621221 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.636952 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.650988 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.664780 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.686067 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7ef
c1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.701002 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.709543 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.718384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.718423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.718435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.718454 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.718466 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.751488 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.771620 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.794004 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.808191 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.817075 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.820346 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.820381 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.820390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.820403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.820413 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.831372 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.872695 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.888246 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:18Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.923523 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.923563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.923574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.923592 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.923602 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:18Z","lastTransitionTime":"2025-10-09T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.958912 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:18 crc kubenswrapper[4810]: E1009 00:07:18.959067 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:07:34.959046997 +0000 UTC m=+52.484685698 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:18 crc kubenswrapper[4810]: I1009 00:07:18.959110 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:18 crc kubenswrapper[4810]: E1009 00:07:18.959326 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:18 crc kubenswrapper[4810]: E1009 00:07:18.959417 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:07:20.959393867 +0000 UTC m=+38.485032608 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.026622 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.026687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.026705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.026744 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.026763 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.060178 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.060285 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.060336 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.060378 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060391 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060524 4810 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:35.060489469 +0000 UTC m=+52.586128210 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060550 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060567 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060616 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060685 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060643 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:35.060623142 +0000 UTC m=+52.586261943 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060710 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060577 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060743 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060800 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:35.060774067 +0000 UTC m=+52.586412808 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.060869 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:07:35.060816088 +0000 UTC m=+52.586454819 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.129948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.130006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.130024 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.130048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.130065 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.233726 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.233782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.233805 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.233883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.233908 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.253618 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.253635 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.253873 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.253813 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.253956 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.254088 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.254187 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:19 crc kubenswrapper[4810]: E1009 00:07:19.254270 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.337153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.337228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.337253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.337286 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.337309 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.440637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.440723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.440749 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.440777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.440798 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.543922 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.543980 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.543998 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.544023 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.544041 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.647128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.647197 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.647220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.647252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.647275 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.750525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.750619 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.750643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.750671 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.750693 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.853233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.853270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.853279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.853291 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.853302 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.956746 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.956815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.956860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.956886 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:19 crc kubenswrapper[4810]: I1009 00:07:19.956904 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:19Z","lastTransitionTime":"2025-10-09T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.060206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.060273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.060294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.060320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.060341 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.163329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.163391 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.163408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.163435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.163458 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.266550 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.266615 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.266633 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.266662 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.266683 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.368815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.368912 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.368927 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.368962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.368980 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.472143 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.472208 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.472225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.472249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.472267 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.575348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.575417 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.575440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.575475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.575500 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.678773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.678837 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.678853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.678873 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.678886 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.781229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.781283 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.781300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.781324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.781342 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.884790 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.884877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.884895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.884921 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.884938 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.980804 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:20 crc kubenswrapper[4810]: E1009 00:07:20.981031 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:20 crc kubenswrapper[4810]: E1009 00:07:20.981113 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:07:24.981090578 +0000 UTC m=+42.506729319 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.988063 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.988115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.988141 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.988172 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:20 crc kubenswrapper[4810]: I1009 00:07:20.988194 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:20Z","lastTransitionTime":"2025-10-09T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.091499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.091578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.091602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.091633 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.091656 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.194766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.194881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.194905 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.194932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.194956 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.253226 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.253338 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:21 crc kubenswrapper[4810]: E1009 00:07:21.253394 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.253434 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.253432 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:21 crc kubenswrapper[4810]: E1009 00:07:21.253587 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:21 crc kubenswrapper[4810]: E1009 00:07:21.253779 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:21 crc kubenswrapper[4810]: E1009 00:07:21.254405 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.297891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.297960 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.297979 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.298009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.298026 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.401243 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.401301 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.401318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.401343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.401361 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.503841 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.503887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.503901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.503920 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.503933 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.606906 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.606980 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.607002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.607031 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.607049 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.710510 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.710577 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.710602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.710633 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.710657 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.814115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.814184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.814207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.814239 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.814262 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.917076 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.917128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.917158 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.917175 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:21 crc kubenswrapper[4810]: I1009 00:07:21.917193 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:21Z","lastTransitionTime":"2025-10-09T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.020628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.020667 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.020679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.020697 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.020710 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.123883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.123953 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.123974 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.124000 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.124017 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.230707 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.230768 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.230785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.230809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.230875 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.338699 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.338772 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.338796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.338881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.338907 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.442569 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.442655 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.442684 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.442716 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.442739 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.545647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.545712 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.545737 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.545766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.545785 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.648257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.648299 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.648309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.648325 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.648335 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.751526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.751584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.751599 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.751620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.751635 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.854699 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.854785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.854803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.854861 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.854880 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.958147 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.958216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.958234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.958260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:22 crc kubenswrapper[4810]: I1009 00:07:22.958300 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:22Z","lastTransitionTime":"2025-10-09T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.044108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.044196 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.044219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.044254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.044278 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.066280 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.072673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.072779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.072806 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.072893 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.072917 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.095166 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.101571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.101618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.101677 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.101702 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.102244 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.126348 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.131176 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.131239 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.131264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.131296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.131321 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.152976 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.159015 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.159172 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.159195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.159218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.159266 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.180213 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.180475 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.182703 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.182800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.182859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.182891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.182912 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.253134 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.253227 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.253333 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.253453 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.253569 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.253613 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.253752 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:23 crc kubenswrapper[4810]: E1009 00:07:23.253959 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.273773 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.286133 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.286225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.286277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.286304 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.286353 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.293257 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.313965 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.351849 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.372934 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.389625 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.389705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.389724 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.389753 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.389772 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.390636 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.412206 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.432358 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.462952 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7ef
c1a95b446fd82ea171209034\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.486097 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.492754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.492808 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.492859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.492890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.492911 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.501905 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.519391 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 
2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.533381 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.548774 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.566678 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.581382 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.595533 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:23Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.595996 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.596027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.596037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.596056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.596067 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.699745 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.699861 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.699880 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.699902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.699950 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.803349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.803644 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.803662 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.803685 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.803702 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.906983 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.907057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.907077 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.907115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:23 crc kubenswrapper[4810]: I1009 00:07:23.907135 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:23Z","lastTransitionTime":"2025-10-09T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.010279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.010356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.010374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.010401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.010423 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.115483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.115550 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.115567 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.115592 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.115609 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.219779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.219879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.219912 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.219941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.219958 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.323295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.323342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.323354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.323374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.323387 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.426397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.426450 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.426468 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.426492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.426509 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.529509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.529577 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.529603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.529631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.529656 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.632450 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.632526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.632553 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.632580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.632600 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.735482 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.735562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.735585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.735613 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.735632 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.838412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.838459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.838470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.838486 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.838498 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.941232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.941295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.941313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.941339 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:24 crc kubenswrapper[4810]: I1009 00:07:24.941357 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:24Z","lastTransitionTime":"2025-10-09T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.026504 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:25 crc kubenswrapper[4810]: E1009 00:07:25.026699 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:25 crc kubenswrapper[4810]: E1009 00:07:25.026783 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:07:33.026759635 +0000 UTC m=+50.552398376 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.044303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.044345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.044356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.044373 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.044385 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.147877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.147959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.147977 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.148002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.148019 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.251092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.251154 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.251173 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.251198 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.251217 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.253807 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.253920 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.253871 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.253962 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:25 crc kubenswrapper[4810]: E1009 00:07:25.254039 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:25 crc kubenswrapper[4810]: E1009 00:07:25.254161 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:25 crc kubenswrapper[4810]: E1009 00:07:25.254212 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:25 crc kubenswrapper[4810]: E1009 00:07:25.254372 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.354222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.354293 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.354310 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.354333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.354350 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.457313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.457378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.457396 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.457422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.457440 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.560134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.560205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.560222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.560245 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.560261 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.663507 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.663571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.663595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.663625 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.663646 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.766771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.766880 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.766914 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.766943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.766963 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.869510 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.869546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.869557 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.869574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.869586 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.972550 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.972628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.972647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.972663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:25 crc kubenswrapper[4810]: I1009 00:07:25.972676 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:25Z","lastTransitionTime":"2025-10-09T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.076175 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.076251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.076270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.076295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.076315 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.180045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.180109 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.180132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.180162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.180199 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.284044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.284112 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.284133 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.284162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.284188 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.387400 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.387460 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.387479 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.387503 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.387521 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.490652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.490767 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.490876 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.490905 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.490926 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.593350 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.593452 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.593472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.593538 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.593557 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.696129 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.696203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.696221 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.696248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.696266 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.799496 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.799557 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.799575 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.799601 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.799618 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.902960 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.903031 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.903049 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.903075 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:26 crc kubenswrapper[4810]: I1009 00:07:26.903097 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:26Z","lastTransitionTime":"2025-10-09T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.006161 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.007002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.007052 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.007217 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.007238 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.110004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.110085 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.110110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.110138 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.110161 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.219030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.219082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.219110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.219136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.219155 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.253282 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.253334 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:27 crc kubenswrapper[4810]: E1009 00:07:27.253491 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.253569 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:27 crc kubenswrapper[4810]: E1009 00:07:27.253720 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:27 crc kubenswrapper[4810]: E1009 00:07:27.253882 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.253997 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:27 crc kubenswrapper[4810]: E1009 00:07:27.254081 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.322576 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.322618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.322629 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.322647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.322661 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.426426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.426562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.426581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.426607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.426626 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.529927 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.529990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.530003 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.530028 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.530047 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.634328 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.634412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.634436 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.634467 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.634491 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.738943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.739021 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.739040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.739071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.739098 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.841599 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.841654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.841667 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.841686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.841698 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.944643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.944744 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.944769 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.944807 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:27 crc kubenswrapper[4810]: I1009 00:07:27.944916 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:27Z","lastTransitionTime":"2025-10-09T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.048273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.048352 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.048375 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.048407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.048428 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.151891 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.151979 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.152008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.152041 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.152065 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.254011 4810 scope.go:117] "RemoveContainer" containerID="220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.255631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.255682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.255695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.255717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.255735 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.358067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.358101 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.358113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.358130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.358146 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.460432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.460483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.460498 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.460522 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.460535 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.563620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.563649 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.563659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.563673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.563681 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.600198 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/1.log" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.603202 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.603837 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.634355 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1b
add022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics 
clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.651248 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.666082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.666144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.666157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.666176 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.666190 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.675988 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.708225 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.739268 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.762355 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.768236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.768286 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.768300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.768322 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.768335 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.777726 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\
\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.788984 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.805212 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.821456 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.834801 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.847710 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.861375 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.870490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.870537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.870548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.870566 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.870577 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.890714 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.904739 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.917681 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.932435 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:28Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.973809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.973916 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.973933 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.973981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:28 crc kubenswrapper[4810]: I1009 00:07:28.973997 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:28Z","lastTransitionTime":"2025-10-09T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.077932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.078008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.078021 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.078046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.078063 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.181045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.181088 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.181101 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.181120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.181134 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.253205 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:29 crc kubenswrapper[4810]: E1009 00:07:29.253415 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.253674 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.253900 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:29 crc kubenswrapper[4810]: E1009 00:07:29.254028 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.254267 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:29 crc kubenswrapper[4810]: E1009 00:07:29.254484 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:29 crc kubenswrapper[4810]: E1009 00:07:29.254525 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.284207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.284505 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.284640 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.284770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.284939 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.388397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.388445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.388458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.388477 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.388508 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.491397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.491458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.491479 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.491506 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.491524 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.595030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.595092 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.595111 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.595138 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.595157 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.611157 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/2.log" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.612537 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/1.log" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.616402 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d" exitCode=1 Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.616465 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.616518 4810 scope.go:117] "RemoveContainer" containerID="220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.617650 4810 scope.go:117] "RemoveContainer" containerID="9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d" Oct 09 00:07:29 crc kubenswrapper[4810]: E1009 00:07:29.617939 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.641296 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.658659 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.675641 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.695393 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.698347 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.698389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.698401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.698420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.698433 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.712794 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117e
e1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.731037 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.743045 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.762384 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.777175 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.791602 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.813064 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.813149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.813175 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.813209 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.813233 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.836607 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.867364 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.879965 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.892317 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.903074 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.915211 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.915271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.915291 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.915314 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.915332 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:29Z","lastTransitionTime":"2025-10-09T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.924647 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220f1727bdf477c4a880e0a85a3aeef33533e7efc1a95b446fd82ea171209034\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:14Z\\\",\\\"message\\\":\\\"k6g for pod on switch crc\\\\nI1009 00:07:14.481399 6297 services_controller.go:444] Built service openshift-controller-manager-operator/metrics LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF1009 00:07:14.481306 6297 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:14Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:14.481421 6297 services_controller.go:445] Built service openshift-controller-manager-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI1009 00:07:14.481448 6297 services_controller.go:451] Built service openshift-controller-manager-operator/metrics 
clu\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] 
Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:29 crc kubenswrapper[4810]: I1009 00:07:29.937372 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.018635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.018720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.018742 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.018774 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.018797 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.122496 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.122544 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.122558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.122588 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.122604 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.226116 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.226173 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.226190 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.226215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.226233 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.329280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.329345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.329368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.329402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.329424 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.432532 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.432584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.432607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.432635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.432656 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.536187 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.536252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.536270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.536295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.536366 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.623429 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/2.log" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.628638 4810 scope.go:117] "RemoveContainer" containerID="9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d" Oct 09 00:07:30 crc kubenswrapper[4810]: E1009 00:07:30.629095 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.638903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.638950 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.638961 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.638981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.638994 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.643271 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.665120 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.681461 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.710279 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1b
add022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.730790 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.741388 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.741430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.741444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.741465 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.741479 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.756188 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.771694 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.788254 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.805229 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.825361 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.843942 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.843989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.844006 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.844030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.844048 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.844143 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.862292 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.881267 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.894648 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.913427 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.947746 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.947815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.947859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.947888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.947906 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:30Z","lastTransitionTime":"2025-10-09T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.950409 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:30 crc kubenswrapper[4810]: I1009 00:07:30.970260 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:30Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.051263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.051323 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.051343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.051367 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.051384 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.155287 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.155356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.155376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.155398 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.155413 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.253342 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:31 crc kubenswrapper[4810]: E1009 00:07:31.253556 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.253922 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:31 crc kubenswrapper[4810]: E1009 00:07:31.254057 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.254093 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.254376 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:31 crc kubenswrapper[4810]: E1009 00:07:31.254615 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:31 crc kubenswrapper[4810]: E1009 00:07:31.254888 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.258484 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.258523 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.258537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.258555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.258567 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.361683 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.361760 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.361780 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.361805 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.361852 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.464407 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.464480 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.464504 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.464535 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.464553 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.568061 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.568121 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.568139 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.568164 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.568184 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.670796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.670881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.670902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.670926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.670944 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.774285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.774347 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.774364 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.774390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.774409 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.877771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.877928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.877955 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.877986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.878007 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.981391 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.981456 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.981473 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.981498 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:31 crc kubenswrapper[4810]: I1009 00:07:31.981522 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:31Z","lastTransitionTime":"2025-10-09T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.037926 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.058497 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.083555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.083596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.083608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.083624 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.083637 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.089751 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.107131 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.120892 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.138734 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.155273 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.178596 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1b
add022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.186679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.186751 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.186771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.186798 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.186841 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.196657 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.211974 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.232777 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 
2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.248421 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.268911 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.287277 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.289665 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.289847 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.290124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.290441 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.290731 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.302062 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.321532 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.341197 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.359312 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:32Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.394472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.394679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.394806 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.394965 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.395098 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.499560 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.499929 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.500182 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.500444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.500651 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.604651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.604990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.605113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.605234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.605389 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.708409 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.708468 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.708491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.708514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.708531 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.811670 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.811714 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.811727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.811745 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.811759 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.915103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.915170 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.915189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.915212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:32 crc kubenswrapper[4810]: I1009 00:07:32.915229 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:32Z","lastTransitionTime":"2025-10-09T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.020142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.020209 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.020227 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.020253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.020271 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.044695 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.045125 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.045293 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:07:49.045237306 +0000 UTC m=+66.570876037 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.123938 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.123999 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.124020 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.124046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.124070 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.227064 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.227950 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.228097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.228236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.228897 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.253076 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.253127 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.253084 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.253286 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.253467 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.253627 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.253672 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.253885 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.257448 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.257509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.257529 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.257551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.257569 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.274463 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.278685 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.283727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.283796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.283848 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.283883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.283907 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.305009 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.310065 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.310128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.310152 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.310183 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.310205 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.311218 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.331943 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.333017 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.340309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.340402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.340426 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.340456 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.340481 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.348766 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.362194 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.366947 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.367009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.367030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.367055 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.367072 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.369573 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.380869 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: E1009 00:07:33.381033 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.382555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.382592 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.382604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.382624 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.382637 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.382722 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.403692 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.418804 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.431585 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.449731 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.463155 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.479932 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.484977 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.485054 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.485068 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.485115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.485129 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.495303 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.510220 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.523339 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.541547 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.563696 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:33Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.587695 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.587724 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.587734 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.587750 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.587759 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.690530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.690576 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.690588 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.690605 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.690618 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.794204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.794290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.794313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.794342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.794360 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.896767 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.896879 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.896899 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.896928 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:33 crc kubenswrapper[4810]: I1009 00:07:33.896947 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:33Z","lastTransitionTime":"2025-10-09T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.000174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.000228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.000244 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.000271 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.000290 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.103567 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.103658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.103681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.103710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.103730 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.207212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.207885 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.208082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.208230 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.208354 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.312234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.312290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.312308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.312333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.312348 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.415562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.415628 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.415647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.415675 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.415696 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.519623 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.519717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.519750 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.519782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.519805 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.622475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.622534 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.622552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.622621 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.622642 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.726180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.726245 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.726266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.726297 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.726320 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.828710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.828764 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.828773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.828795 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.828806 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.931935 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.932008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.932037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.932067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.932089 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:34Z","lastTransitionTime":"2025-10-09T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:34 crc kubenswrapper[4810]: I1009 00:07:34.971617 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:07:34 crc kubenswrapper[4810]: E1009 00:07:34.971811 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:08:06.971776325 +0000 UTC m=+84.497415076 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.037405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.037449 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.037493 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.037518 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.037587 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.047185 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.060863 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.062702 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.072687 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.072758 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.072787 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.072934 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.072984 4810 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:08:07.072928208 +0000 UTC m=+84.598566919 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073014 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:08:07.07300061 +0000 UTC m=+84.598639331 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.072803 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073051 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073127 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073157 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073225 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:08:07.073205756 +0000 UTC m=+84.598844507 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.073107 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073272 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073323 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073335 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.073405 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:08:07.07336783 +0000 UTC m=+84.599006541 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.076720 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.097173 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.113697 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.128104 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.141124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.141202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.141218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.141243 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.141259 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.151950 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.167726 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.181968 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.198986 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.212718 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.224240 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.237387 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.243524 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.243586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.243604 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.243629 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.243649 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.249407 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.253298 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.253379 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.253445 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.253658 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.253758 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.253873 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.254107 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:35 crc kubenswrapper[4810]: E1009 00:07:35.254647 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.268090 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.287130 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.301713 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.316592 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:35Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.347415 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.347479 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.347489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.347505 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.347516 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.451252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.451320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.451338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.451364 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.451384 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.554150 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.554219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.554237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.554262 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.554281 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.657012 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.657079 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.657104 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.657128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.657147 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.760713 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.760766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.760785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.760809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.760901 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.864592 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.864647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.864666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.864692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.864709 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.968119 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.968184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.968202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.968229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:35 crc kubenswrapper[4810]: I1009 00:07:35.968372 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:35Z","lastTransitionTime":"2025-10-09T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.071676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.071757 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.071790 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.071862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.071888 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.175276 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.175339 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.175357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.175386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.175404 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.278184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.278270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.278297 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.278326 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.278349 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.381275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.381336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.381349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.381369 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.381382 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.485174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.485228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.485248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.485274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.485294 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.590264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.590336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.590371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.590403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.590424 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.694170 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.694260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.694282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.694307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.694324 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.797185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.797236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.797254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.797277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.797293 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.900040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.900108 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.900135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.900160 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:36 crc kubenswrapper[4810]: I1009 00:07:36.900177 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:36Z","lastTransitionTime":"2025-10-09T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.003421 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.003501 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.003525 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.003552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.003572 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.106499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.106580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.106613 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.106643 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.106663 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.209027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.209121 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.209150 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.209192 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.209215 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.253200 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.253306 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.253376 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:37 crc kubenswrapper[4810]: E1009 00:07:37.253390 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:37 crc kubenswrapper[4810]: E1009 00:07:37.253481 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.253719 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:37 crc kubenswrapper[4810]: E1009 00:07:37.253691 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:37 crc kubenswrapper[4810]: E1009 00:07:37.253934 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.310954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.310987 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.310995 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.311007 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.311016 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.412981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.413040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.413057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.413081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.413098 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.516168 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.516216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.516232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.516253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.516270 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.619510 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.619586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.619602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.619626 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.619644 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.722792 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.722921 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.722948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.722975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.722992 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.825773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.825859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.825882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.825915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.825939 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.928877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.928943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.928964 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.928987 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:37 crc kubenswrapper[4810]: I1009 00:07:37.929006 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:37Z","lastTransitionTime":"2025-10-09T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.032746 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.032876 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.032901 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.032930 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.032951 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.136757 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.136908 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.136981 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.137008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.137026 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.239489 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.239559 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.239582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.239676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.239703 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.342263 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.342314 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.342332 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.342355 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.342372 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.446308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.446363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.446383 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.446411 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.446429 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.549333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.549396 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.549415 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.549443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.549462 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.653290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.653386 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.653409 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.653443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.653478 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.756904 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.757001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.757021 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.757049 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.757068 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.861027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.861400 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.861430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.861504 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.861528 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.964284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.964341 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.964360 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.964385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:38 crc kubenswrapper[4810]: I1009 00:07:38.964402 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:38Z","lastTransitionTime":"2025-10-09T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.068103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.068180 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.068204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.068235 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.068257 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.170798 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.170895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.170924 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.170953 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.170973 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.253427 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.253456 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.253509 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:39 crc kubenswrapper[4810]: E1009 00:07:39.253649 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.253749 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:39 crc kubenswrapper[4810]: E1009 00:07:39.254003 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:39 crc kubenswrapper[4810]: E1009 00:07:39.254236 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:39 crc kubenswrapper[4810]: E1009 00:07:39.254408 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.274177 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.274218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.274234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.274257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.274275 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.377868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.377962 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.378014 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.378041 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.378058 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.481303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.481378 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.481397 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.481424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.481440 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.585229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.585295 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.585312 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.585338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.585356 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.688057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.688127 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.688145 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.688170 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.688191 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.794855 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.794913 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.794926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.794945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.794961 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.898162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.898254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.898267 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.898284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:39 crc kubenswrapper[4810]: I1009 00:07:39.898296 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:39Z","lastTransitionTime":"2025-10-09T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.001867 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.001923 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.001943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.001967 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.001986 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.105157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.105228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.105252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.105282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.105302 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.208434 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.208519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.208537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.208560 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.208606 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.311097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.311150 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.311174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.311203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.311224 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.414919 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.414973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.414991 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.415014 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.415034 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.518085 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.518676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.518771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.518881 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.518974 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.621428 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.621453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.621462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.621475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.621482 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.724666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.724779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.724798 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.724855 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.724894 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.841556 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.841613 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.841630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.841654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.841669 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.944715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.944778 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.944800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.944862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:40 crc kubenswrapper[4810]: I1009 00:07:40.944888 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:40Z","lastTransitionTime":"2025-10-09T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.048532 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.048607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.048631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.048670 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.048695 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.154709 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.154770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.154791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.154852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.154876 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.253129 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:41 crc kubenswrapper[4810]: E1009 00:07:41.253328 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.253701 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.253993 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:41 crc kubenswrapper[4810]: E1009 00:07:41.254171 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.254429 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:41 crc kubenswrapper[4810]: E1009 00:07:41.254764 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:41 crc kubenswrapper[4810]: E1009 00:07:41.254923 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.257464 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.257554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.257580 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.257618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.257660 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.360926 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.360991 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.361009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.361039 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.361078 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.463812 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.464105 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.464185 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.464266 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.464349 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.567284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.568153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.568526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.568681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.568860 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.671948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.672236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.672331 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.672412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.672486 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.775203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.775252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.775270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.775294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.775311 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.879339 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.879394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.879410 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.879434 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.879454 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.982252 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.982347 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.982366 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.982392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:41 crc kubenswrapper[4810]: I1009 00:07:41.982414 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:41Z","lastTransitionTime":"2025-10-09T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.085453 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.085583 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.085605 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.085632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.085648 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.188674 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.188728 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.188745 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.188771 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.188789 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.291637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.291700 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.291717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.291743 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.291760 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.394630 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.394709 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.394729 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.394754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.394771 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.497717 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.497776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.497796 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.497884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.497904 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.601417 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.601478 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.601495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.601519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.601536 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.705165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.705233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.705242 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.705257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.705266 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.808035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.808093 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.808110 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.808134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.808152 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.911207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.911267 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.911284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.911308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:42 crc kubenswrapper[4810]: I1009 00:07:42.911325 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:42Z","lastTransitionTime":"2025-10-09T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.014472 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.014544 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.014562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.014588 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.014609 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.117316 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.117384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.117402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.117429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.117449 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.220137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.220193 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.220210 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.220233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.220252 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.252890 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.252980 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.252993 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.253119 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.253224 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.253353 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.253655 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.253801 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.278390 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28
c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.295125 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\
\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.314136 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.323870 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.323917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.323932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.323953 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.323968 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.332237 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.352192 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.371433 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.391582 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.413539 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.426399 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.426492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.426511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.426567 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.426586 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.432263 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.466245 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.486507 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.503554 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.523288 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.529619 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.529692 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.529711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.529736 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.529752 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.544301 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.577412 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.592698 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.592746 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.592758 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.592781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.592796 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.596024 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.607707 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.608373 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.611762 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.611803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.611815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.611862 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.611878 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.625789 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.629392 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.633130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.633191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.633203 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.633236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.633249 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.648968 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.652376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.652405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.652415 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.652430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.652444 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.666281 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.670277 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.670340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.670353 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.670374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.670388 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.687803 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:43Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:43Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:43 crc kubenswrapper[4810]: E1009 00:07:43.687996 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.689878 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.689914 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.689925 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.689941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.689953 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.792424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.792493 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.792514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.792541 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.792558 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.895636 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.895757 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.895777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.895801 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.895846 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.998483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.998558 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.998581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.998609 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:43 crc kubenswrapper[4810]: I1009 00:07:43.998631 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:43Z","lastTransitionTime":"2025-10-09T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.103418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.103481 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.103500 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.103527 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.103546 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.208348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.208710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.208735 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.208766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.208789 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.312111 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.312144 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.312177 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.312195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.312206 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.414756 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.414799 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.414856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.414874 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.414910 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.519212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.519332 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.519359 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.519392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.519415 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.622193 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.622237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.622248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.622265 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.622276 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.725125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.725191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.725208 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.725236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.725253 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.828892 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.829021 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.829047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.829074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.829091 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.932209 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.932288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.932311 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.932343 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:44 crc kubenswrapper[4810]: I1009 00:07:44.932366 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:44Z","lastTransitionTime":"2025-10-09T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.035176 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.035220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.035234 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.035254 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.035268 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.138466 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.138635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.138788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.138876 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.138910 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.242336 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.242395 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.242418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.242445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.242462 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.253875 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.253916 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.253997 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:45 crc kubenswrapper[4810]: E1009 00:07:45.254160 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:45 crc kubenswrapper[4810]: E1009 00:07:45.254339 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.254368 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:45 crc kubenswrapper[4810]: E1009 00:07:45.254486 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:45 crc kubenswrapper[4810]: E1009 00:07:45.254599 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.256333 4810 scope.go:117] "RemoveContainer" containerID="9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d" Oct 09 00:07:45 crc kubenswrapper[4810]: E1009 00:07:45.256923 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.345422 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.345490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.345511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.345542 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.345559 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.447930 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.447993 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.448009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.448044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.448061 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.550871 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.550934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.550953 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.550985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.551005 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.654488 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.654555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.654572 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.654598 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.654617 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.756686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.756747 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.756766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.756791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.756810 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.860067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.860128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.860143 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.860160 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.860173 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.963705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.963759 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.963770 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.963786 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:45 crc kubenswrapper[4810]: I1009 00:07:45.963814 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:45Z","lastTransitionTime":"2025-10-09T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.066585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.066637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.066654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.066678 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.066694 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.169800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.169940 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.169959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.169985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.170002 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.273004 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.273045 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.273055 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.273090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.273102 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.376170 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.377815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.378562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.378685 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.378924 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.481734 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.482143 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.482253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.482354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.482473 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.585177 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.585257 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.585282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.585311 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.585329 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.690188 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.690237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.690255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.690281 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.690300 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.793380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.793442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.793464 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.793495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.793521 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.896813 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.896868 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.896890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.896927 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.896940 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:46Z","lastTransitionTime":"2025-10-09T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:46 crc kubenswrapper[4810]: I1009 00:07:46.999712 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.000115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.000307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.000519 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.000813 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.104327 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.104711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.105071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.105262 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.105412 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.209140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.209202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.209220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.209249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.209265 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.254028 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.254061 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.254069 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.254192 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:47 crc kubenswrapper[4810]: E1009 00:07:47.254359 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:47 crc kubenswrapper[4810]: E1009 00:07:47.254488 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:47 crc kubenswrapper[4810]: E1009 00:07:47.254652 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:47 crc kubenswrapper[4810]: E1009 00:07:47.254841 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.312892 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.312951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.312970 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.312998 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.313015 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.417225 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.417270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.417281 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.417298 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.417312 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.520372 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.520852 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.521504 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.522163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.522671 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.625288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.625323 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.625333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.625348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.625359 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.728178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.728294 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.728309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.728324 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.728335 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.831050 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.831458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.831610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.831890 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.832083 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.934856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.934914 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.934934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.934959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:47 crc kubenswrapper[4810]: I1009 00:07:47.934976 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:47Z","lastTransitionTime":"2025-10-09T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.038521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.038769 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.038864 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.039082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.039159 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.141308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.141356 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.141376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.141626 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.141647 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.244931 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.245429 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.245617 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.245766 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.245938 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.390357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.390402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.390411 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.390428 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.390438 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.493191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.493274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.493291 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.493318 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.493334 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.595574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.595659 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.595684 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.595715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.595738 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.698437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.698514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.698533 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.698557 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.698575 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.801995 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.802072 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.802096 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.802131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.802153 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.904651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.904685 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.904693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.904707 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:48 crc kubenswrapper[4810]: I1009 00:07:48.904726 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:48Z","lastTransitionTime":"2025-10-09T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.006887 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.006924 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.006934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.006951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.006961 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.109645 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.110091 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.110249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.110392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.110531 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.140179 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:49 crc kubenswrapper[4810]: E1009 00:07:49.140374 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:49 crc kubenswrapper[4810]: E1009 00:07:49.140446 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:08:21.140428729 +0000 UTC m=+98.666067430 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.213131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.213162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.213170 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.213188 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.213207 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.253179 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.253209 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.253196 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.253289 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:49 crc kubenswrapper[4810]: E1009 00:07:49.253446 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:49 crc kubenswrapper[4810]: E1009 00:07:49.253534 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:49 crc kubenswrapper[4810]: E1009 00:07:49.253594 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:49 crc kubenswrapper[4810]: E1009 00:07:49.253652 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.314952 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.314990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.315003 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.315020 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.315032 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.417406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.417451 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.417462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.417477 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.417489 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.519382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.519412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.519423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.519438 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.519448 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.622053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.622111 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.622135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.622162 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.622183 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.724394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.724444 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.724464 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.724488 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.724508 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.826312 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.826382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.826404 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.826433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.826458 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.928564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.928610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.928627 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.928649 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:49 crc kubenswrapper[4810]: I1009 00:07:49.928665 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:49Z","lastTransitionTime":"2025-10-09T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.031715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.031762 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.031779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.031804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.031858 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.134001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.134040 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.134049 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.134065 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.134074 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.236972 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.237026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.237042 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.237067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.237084 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.340327 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.340373 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.340385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.340401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.340413 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.444495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.444742 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.444763 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.444788 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.444806 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.547686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.547769 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.547793 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.547853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.547877 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.651587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.651651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.651673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.651704 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.651726 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.705940 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/0.log" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.705992 4810 generic.go:334] "Generic (PLEG): container finished" podID="8e9fffc1-16a6-4108-978b-6e85bdfd9c4f" containerID="f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095" exitCode=1 Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.706025 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerDied","Data":"f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.706556 4810 scope.go:117] "RemoveContainer" containerID="f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.721069 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.745701 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.754124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.754335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.754463 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.754606 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.754743 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.762747 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to /host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.779161 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.794809 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.810727 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.824343 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.840574 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.850441 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.859443 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.859492 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.859508 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.859531 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.859548 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.862655 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.872496 4810 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.884725 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.940858 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.953342 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.961888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.961947 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.961956 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.961972 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.961981 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:50Z","lastTransitionTime":"2025-10-09T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.972635 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:50 crc kubenswrapper[4810]: I1009 00:07:50.989270 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.000452 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:50Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.014045 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.064437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.064480 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.064490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.064507 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.064519 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.167538 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.167634 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.167652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.167678 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.167701 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.254037 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.254155 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.254184 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:51 crc kubenswrapper[4810]: E1009 00:07:51.254314 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.254366 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:51 crc kubenswrapper[4810]: E1009 00:07:51.254520 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:51 crc kubenswrapper[4810]: E1009 00:07:51.254545 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:51 crc kubenswrapper[4810]: E1009 00:07:51.254680 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.270530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.270572 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.270585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.270602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.270623 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.372627 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.372713 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.372732 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.372759 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.372776 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.475491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.475530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.475539 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.475554 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.475563 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.577857 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.577941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.577961 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.577988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.578006 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.680540 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.680582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.680590 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.680608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.680620 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.712019 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/0.log" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.712081 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerStarted","Data":"58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.722411 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.732932 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.746148 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 
00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.758184 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.768781 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.779262 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.787718 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.787754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.787767 4810 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.787786 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.787797 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.789907 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.803219 4810 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c
81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.815595 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e4
40c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.835448 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.850622 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.864149 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.879222 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.899751 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.899804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.899813 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.899842 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.899854 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:51Z","lastTransitionTime":"2025-10-09T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.924744 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.953209 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.967663 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to 
/host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.980912 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:51 crc kubenswrapper[4810]: I1009 00:07:51.992263 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:51Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.001730 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.001765 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.001774 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.001789 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.001798 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.104588 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.104627 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.104635 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.104651 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.104660 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.207282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.207387 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.207406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.207433 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.207455 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.309518 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.309556 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.309565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.309579 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.309590 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.411957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.411999 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.412009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.412024 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.412034 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.514966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.515082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.515382 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.515420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.515432 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.618551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.618590 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.618604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.618620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.618631 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.720581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.720620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.720631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.720647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.720658 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.823212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.823255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.823267 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.823282 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.823292 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.926744 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.926791 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.926800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.926813 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:52 crc kubenswrapper[4810]: I1009 00:07:52.926833 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:52Z","lastTransitionTime":"2025-10-09T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.029275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.029319 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.029329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.029345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.029357 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.132147 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.132210 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.132233 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.132260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.132281 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.235285 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.235329 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.235338 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.235358 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.235376 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.252879 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.252906 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.252932 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.252921 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.252990 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.253076 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.253186 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.253262 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.275383 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b8
2799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.296541 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.330018 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.337949 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.338000 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.338018 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.338042 4810 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.338059 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.345053 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.362733 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.377745 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.392039 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.412757 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1b
add022cb446ee6c2a749de3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.428364 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to 
/host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.438766 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.440289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.440328 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.440340 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.440357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.440369 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.457230 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Compl
eted\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\
\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"p
odIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.469022 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7r
xj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.484203 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6
a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.495858 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.505552 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.518805 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.531057 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.542087 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.542115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.542125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.542140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.542150 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.545414 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.643545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.643582 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.643594 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.643608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.643618 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.746020 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.746071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.746083 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.746101 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.746112 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.834853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.835243 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.835399 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.835811 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.836023 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.857149 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.862256 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.862354 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.862374 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.862400 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.862418 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.877944 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.883134 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.883300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.883385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.883458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.883543 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.895559 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.900610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.900661 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.900679 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.900704 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.900726 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.912688 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.917197 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.917272 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.917290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.917313 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.917424 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.931783 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:53Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:53 crc kubenswrapper[4810]: E1009 00:07:53.931957 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.933572 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.933617 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.933634 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.933654 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:53 crc kubenswrapper[4810]: I1009 00:07:53.933669 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:53Z","lastTransitionTime":"2025-10-09T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.036423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.036456 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.036465 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.036479 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.036488 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.139632 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.139695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.139719 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.139750 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.139766 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.241345 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.241380 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.241388 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.241401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.241410 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.344167 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.344204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.344213 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.344228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.344238 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.447577 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.447894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.447990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.448074 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.448151 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.550986 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.551035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.551049 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.551067 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.551079 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.653925 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.653985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.654002 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.654027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.654045 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.756293 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.756337 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.756348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.756368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.756378 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.859016 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.859047 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.859056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.859072 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.859083 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.962218 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.962273 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.962289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.962312 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:54 crc kubenswrapper[4810]: I1009 00:07:54.962328 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:54Z","lastTransitionTime":"2025-10-09T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.065463 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.065528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.065553 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.065585 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.065609 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.168574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.168618 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.168631 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.168649 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.168661 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.253362 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.253403 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.253416 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.253473 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:55 crc kubenswrapper[4810]: E1009 00:07:55.253580 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:55 crc kubenswrapper[4810]: E1009 00:07:55.253741 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:55 crc kubenswrapper[4810]: E1009 00:07:55.253853 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:55 crc kubenswrapper[4810]: E1009 00:07:55.253917 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.271695 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.271748 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.271761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.271782 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.271796 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.373686 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.373728 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.373740 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.373759 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.373771 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.476416 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.476458 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.476470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.476485 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.476495 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.579042 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.579119 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.579130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.579146 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.579156 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.681541 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.681584 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.681595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.681610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.681621 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.784095 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.784135 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.784148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.784165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.784223 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.887169 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.887212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.887222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.887236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.887246 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.989364 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.989424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.989435 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.989452 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:55 crc kubenswrapper[4810]: I1009 00:07:55.989461 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:55Z","lastTransitionTime":"2025-10-09T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.091999 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.092058 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.092068 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.092082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.092090 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.194502 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.194572 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.194607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.194625 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.194636 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.296805 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.296854 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.296866 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.296882 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.296893 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.399915 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.399967 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.399985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.400005 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.400019 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.502699 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.502754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.502777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.502798 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.502813 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.605384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.605432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.605445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.605465 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.605476 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.708517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.708578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.708594 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.708614 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.708629 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.811599 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.811646 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.811658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.811680 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.811693 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.914657 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.914693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.914701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.914715 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:56 crc kubenswrapper[4810]: I1009 00:07:56.914724 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:56Z","lastTransitionTime":"2025-10-09T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.017608 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.017780 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.017804 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.017872 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.017890 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.121873 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.121951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.121975 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.122003 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.122024 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.224989 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.225034 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.225046 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.225064 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.225076 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.253125 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.253244 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.253475 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:57 crc kubenswrapper[4810]: E1009 00:07:57.253470 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.253480 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:57 crc kubenswrapper[4810]: E1009 00:07:57.253942 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:57 crc kubenswrapper[4810]: E1009 00:07:57.254048 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:57 crc kubenswrapper[4810]: E1009 00:07:57.254116 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.265308 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.327592 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.327672 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.327696 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.327727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.327749 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.429781 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.429860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.429877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.429899 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.429915 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.532586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.532689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.532725 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.532761 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.532785 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.635834 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.635884 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.635896 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.635916 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.635928 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.738811 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.738865 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.738877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.738892 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.738904 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.842082 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.842119 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.842132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.842148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.842159 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.944229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.944264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.944275 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.944292 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:57 crc kubenswrapper[4810]: I1009 00:07:57.944303 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:57Z","lastTransitionTime":"2025-10-09T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.046663 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.046689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.046698 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.046710 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.046719 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.150032 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.150098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.150119 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.150149 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.150166 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.253157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.253247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.253264 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.253288 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.253305 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.254506 4810 scope.go:117] "RemoveContainer" containerID="9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.355615 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.355650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.355661 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.355677 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.355688 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.461103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.461178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.461197 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.461221 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.461240 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.566385 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.566427 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.566442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.566465 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.566479 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.669200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.669238 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.669247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.669262 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.669272 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.734122 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/2.log" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.736692 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.737358 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.754868 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81a6d960-0fa0-42d6-9927-34e68d1784f4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e58cdd9f168937a38507c024471b5f08f253cd37b58a02ebc4e7e9622d5b44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.769950 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-
10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"
podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.771604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.771637 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.771647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.771662 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.771671 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.782277 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 
00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.793187 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.807577 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.819676 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.832561 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.847032 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.860170 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.874909 4810 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.874941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.874952 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.874969 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.874981 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.877644 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.889451 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.907745 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.920903 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.931519 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.946248 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.962067 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.977184 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.977232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.977247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.977284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.977302 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:58Z","lastTransitionTime":"2025-10-09T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:58 crc kubenswrapper[4810]: I1009 00:07:58.986327 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] 
Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:58Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.002790 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to 
/host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to /host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.013588 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.080001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.080041 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.080053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.080072 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.080083 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.182907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.182959 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.182977 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.183001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.183018 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.254693 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.254728 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.254737 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.254760 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:07:59 crc kubenswrapper[4810]: E1009 00:07:59.255012 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:07:59 crc kubenswrapper[4810]: E1009 00:07:59.255085 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:07:59 crc kubenswrapper[4810]: E1009 00:07:59.255231 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:07:59 crc kubenswrapper[4810]: E1009 00:07:59.255273 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.285411 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.285473 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.285486 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.285499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.285512 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.389902 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.389938 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.389950 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.389966 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.389978 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.493815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.493925 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.493944 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.493980 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.494003 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.597142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.597198 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.597215 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.597240 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.597257 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.700506 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.700604 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.700626 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.700653 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.700671 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.743585 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/3.log" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.744691 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/2.log" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.748798 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" exitCode=1 Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.748885 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.748978 4810 scope.go:117] "RemoveContainer" containerID="9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.750126 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:07:59 crc kubenswrapper[4810]: E1009 00:07:59.750554 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.774210 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.791149 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.804607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.804687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.804713 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.804743 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.804766 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.812764 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.831136 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.867618 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.889191 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to /host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.905541 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.910581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.910641 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.910666 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.910736 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.910761 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:07:59Z","lastTransitionTime":"2025-10-09T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.926980 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.946220 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:07:59 crc kubenswrapper[4810]: I1009 00:07:59.980420 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3c
d753c906326a0daa2e2ad80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9280d8cd8cfa540830c1d1dbd1aa1f552a334c1badd022cb446ee6c2a749de3d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:29Z\\\",\\\"message\\\":\\\".go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:29Z is after 2025-08-24T17:21:41Z]\\\\nI1009 00:07:29.332278 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1009 00:07:29.332290 6517 obj_retry.go:303] Retry object setup: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332298 6517 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj\\\\nI1009 00:07:29.332305 6517 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-sd2lj in node crc\\\\nI1009 00:07:29.332311 6517 obj_retry.go:386] Ret\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:28Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:59Z\\\",\\\"message\\\":\\\" include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:openshift-controller-manager-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007705ecb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: 
openshift-controller-manager-operator,},ClusterIP:10.217.5.58,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1009 00:07:59.206986 6876 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"nam
e\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:07:59.999809 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"81a6d960-0fa0-42d6-9927-34e68d1784f4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e58cdd9f168937a38507c024471b5f08f253cd37b58a02ebc4e7e9622d5b44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:07:59Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.013615 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.013668 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.013687 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.013720 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.013739 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.027336 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d
1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.046050 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"
mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.067105 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkub
e-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.084313 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.100135 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.115917 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 
00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.117186 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.117231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.117248 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.117274 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.117291 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.137345 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.151751 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.221087 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.221177 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.221224 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.221249 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.221266 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.332951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.333008 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.333057 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.333081 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.333127 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.436409 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.436459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.436476 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.436499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.436517 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.540238 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.540332 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.540350 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.540406 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.540423 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.643620 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.644037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.644089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.644123 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.644145 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.748153 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.748201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.748222 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.748251 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.748272 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.755181 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/3.log" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.761367 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:08:00 crc kubenswrapper[4810]: E1009 00:08:00.761628 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.783598 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to /host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.801384 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.822918 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.841776 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.851934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.852001 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.852026 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.852056 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.852082 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.874223 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:59Z\\\",\\\"message\\\":\\\" include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:openshift-controller-manager-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007705ecb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: openshift-controller-manager-operator,},ClusterIP:10.217.5.58,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1009 00:07:59.206986 6876 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.892220 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81a6d960-0fa0-42d6-9927-34e68d1784f4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e58cdd9f168937a38507c024471b5f08f253cd37b58a02ebc4e7e9622d5b44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1
f5122d63b0c53e1303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.911227 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a
646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\
"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"
terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.931802 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265
a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.951302 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.954765 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.954808 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.954869 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.954894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.954910 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:00Z","lastTransitionTime":"2025-10-09T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.966644 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:00 crc kubenswrapper[4810]: I1009 00:08:00.982159 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:00Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.005071 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.058044 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.058090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.058104 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.058124 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.058140 4810 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.058784 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.071439 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.085044 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.095712 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.107938 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.120204 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.140565 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b
5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:01Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.160467 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.160514 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.160526 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.160543 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.160555 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.253810 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.253870 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.253954 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.254185 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:01 crc kubenswrapper[4810]: E1009 00:08:01.254282 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:01 crc kubenswrapper[4810]: E1009 00:08:01.254348 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:01 crc kubenswrapper[4810]: E1009 00:08:01.254444 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:01 crc kubenswrapper[4810]: E1009 00:08:01.254503 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.263132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.263191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.263364 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.263387 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.263407 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.366773 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.366859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.366878 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.366903 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.366921 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.470530 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.470616 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.470638 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.470664 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.470693 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.574420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.574497 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.574536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.574568 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.574590 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.678216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.678287 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.678308 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.678341 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.678360 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.782910 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.782988 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.783009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.783036 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.783056 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.886077 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.886148 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.886186 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.886219 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.886242 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.989095 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.989158 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.989176 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.989200 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:01 crc kubenswrapper[4810]: I1009 00:08:01.989217 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:01Z","lastTransitionTime":"2025-10-09T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.092126 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.092202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.092228 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.092258 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.092281 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.196237 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.196283 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.196298 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.196319 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.196334 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.299587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.299995 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.300321 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.300497 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.300696 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.403469 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.403547 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.403571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.403602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.403622 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.507027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.507100 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.507118 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.507146 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.507164 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.610419 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.610732 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.610946 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.611103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.611236 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.715269 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.715598 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.715787 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.716030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.716174 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.819303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.819333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.819360 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.819375 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.819384 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.922681 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.922751 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.922775 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.922806 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:02 crc kubenswrapper[4810]: I1009 00:08:02.922861 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:02Z","lastTransitionTime":"2025-10-09T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.026737 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.026803 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.026859 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.026885 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.026902 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.130427 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.130474 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.130482 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.130497 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.130506 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.233293 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.233358 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.233376 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.233405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.233424 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.252784 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:03 crc kubenswrapper[4810]: E1009 00:08:03.252976 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.253045 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.253116 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:03 crc kubenswrapper[4810]: E1009 00:08:03.253229 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.253330 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:03 crc kubenswrapper[4810]: E1009 00:08:03.253514 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:03 crc kubenswrapper[4810]: E1009 00:08:03.253673 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.272422 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"81a6d960-0fa0-42d6-9927-34e68d1784f4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e58cdd9f168937a38507c024471b5f08f253cd37b58a02ebc4e7e9622d5b44b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4357baf0204f9bb17082adb7ee7cab509461c586a2f1b1f5122d63b0c53e1303\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.299223 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-lzf84" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12a20549-7332-4c4f-b63a-38afc78107e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca7003d04049f84f6b3878c12a6e48589bcaf03f64cc275e644585f473484d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dd6a146d93a063b4d24bbb1e7b2fb2ce4c7b4249b4c84a646454329c4f9d69de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"
},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ff9cf153e789061ef1a5a742a76937cc37a1e1cd5bcceebb6ef662582d7603\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://764f962d3eccd9b99437507daf5592eb08c18dc76f74d52aec834b11141c2f6e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://60b5c68dbddf2142a12c9e47e03d1a4ba2d1e3ef1d15853d7d81b27ffb8e34fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://860ecce51a4cb1e9d14a22533c2663998b85ace32191d2c92f2a1b10c8d4ea69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ceeaab8b72e61d6258c1321431ed052313e7963c8bfafafe1bc7ce592b8a18d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cbsf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-lzf84\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.317211 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b3ee98c8-29c6-4162-beca-d8abe019a814\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5e880fe0fac9212d975c4feb61c34a704d1f4648935a4e0e40ef0f0f3f0e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b5ed72d241d6596cc1ac5c8f1dffb707e94b90f7dd67ce0361152bf45fbfeb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n7rxj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xsv9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.336560 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.336638 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.336662 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.336693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.336716 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.341001 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae8145e5-f86f-4e89-85d1-e4f87690e40a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4ce5f2cdd121b6399a5a1dd63f3a62c9a7942702c9a20621914f34f095b385a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af6c4584c0b946cfa6cee710ad61436d36362f3446400f30c47a73940bd43a6e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b2b8c6d0d873d8d2fc3ec9e492528f56ab5305fed16e6a4553c82168355d99a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b140b829a2384abbb7f1eb7bfd4ecb638225bf165c4f3253f2563977b760f2a2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e583c6f11ad3f4bd8a67691548580c19e2b2ffd55c3e7a16785b6706080e732c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"le observer\\\\nW1009 00:07:03.012468 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1009 00:07:03.012590 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 00:07:03.013611 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2381482131/tls.crt::/tmp/serving-cert-2381482131/tls.key\\\\\\\"\\\\nI1009 00:07:03.347449 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 00:07:03.350496 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 00:07:03.350521 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 00:07:03.350548 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 00:07:03.350554 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 00:07:03.356224 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1009 00:07:03.356244 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1009 00:07:03.356253 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356300 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 00:07:03.356315 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 00:07:03.356319 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 00:07:03.356322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 00:07:03.356325 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1009 00:07:03.357466 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:57Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01ae5304af238185455829bebc75ba15174bbef1fee2560ead4a4b66edbcf2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc418651e3f1435575917789b66f02662f61d203b5e9174a626512ea55a91923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.364938 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.383541 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-bw4pj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c3f6a1ff-10fd-446e-9790-f13f432d1f50\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://799511e6cb0fd5db23899d3fd5291b86bfe5018505b459cce11733fdf96bec8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fxjpz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-bw4pj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.402498 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://faa26aada8ad1ac8ec5bdd434209daaf990cb91437867559bb6007bc58908629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3597f7704592a2a3ae8936804568e2958dc700f15ad3e01a3eaea89b0fd5777f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.416124 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e7b43917-aa65-43dc-b71b-7de0af71d3f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7355a518d1866b6bb1ef4b51e93ac6e6e3182c80b84d5546f244c3ee135ce56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z6229\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-6752w\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.431735 4810 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.439536 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.439612 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.439629 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.439650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.439694 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.449946 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cc133cdb-1ac6-47e8-b21d-4001de03ae6e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d93d5c9f2b224d2a61d05a1340bd6ec38f32bc11f700b93046066b41430afde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0edb442b77145445d07b40c21b4c24a55ab097e597dcf9fd9d3d32684ff534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://63ea56bb00df8bce871c0dcdfb14cd7e6b4b83befbb3464e4d349d319c9b504e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://42473623c30955e17ff5dd5e31c162479c81dbfc941a2ba07b9b27a1b412ef16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.464994 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99415919-f08d-4d08-bec9-62c1a1aee203\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://23e58f73379b2c1e86e81fcaf29d277dce634e26aea651af47a7cf3a516c86e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed88b25b12948f311394e91e57409b547d3845218f9e145c782aa5b99bddfa1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0572c1cd30c147fdbbc975ab5da91d054af17cd5d04b49e0488eec633d9dfa66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://947c6e26ffd7fcc468c9f1f92162311f8c52d88260464775321b90a689fd212c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.493987 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"520575d5-8f39-4826-b90c-4d6b6a3d4eef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:06:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e69b4954052f594258b8b129f014805fb7d60490da5733830397fd1e021f098c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9fd7a1f46e715801a31d05358bc30227a0a25cf68c19752ac1154b0d28cc8336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://30a0ab01abfc0078c0542860897e2e7ce0766e4e051e59f7da23d6631e633e20\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebd9b944e87526d01d4a7e760a021bf6f2f422b5080431e27974b0b1fbe12884\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2e71a40873c4f5128f20fa35eff4c243858d4e607efda442a8c8cca90efc73dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:06:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6fdcaa515b8050bfbccbf56d88054b96f2b4466537457e1157caf5e98602a10c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c64354621a60a6cc904a016126e415946747e10590208b02d5f409be984aa759\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e992256c704467e2726e9ac4b469ce3f611ca7b75353775e4e3b3819a824c32\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:06:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:06:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:06:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.508427 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.524862 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-5qcmj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c70efaa9-20ad-45fe-af4f-a068e313dad3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://700834a93a65b3a40b3abcafbab2e321bddf55490e336d7073ce1f8836aa5f41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-246vj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:04Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-5qcmj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.548813 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d25a09f6427953f411f97680ba228e4a01eaa2b511120a89ae4ca220eddf82e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.557785 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.557877 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.557896 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.557919 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.557941 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.578969 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc47e20f2a8f7a68695481c5c1902537674876601cbc5e3766844bdee7605dbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.600060 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:59Z\\\",\\\"message\\\":\\\" include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:openshift-controller-manager-operator-serving-cert service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc007705ecb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:https,Protocol:TCP,Port:443,TargetPort:{0 8443 },NodePort:0,AppProtocol:nil,},},Selector:map[string]string{app: openshift-controller-manager-operator,},ClusterIP:10.217.5.58,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nF1009 00:07:59.206986 6876 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tqsrd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-sd2lj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.613619 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-vrlxd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T00:07:50Z\\\",\\\"message\\\":\\\"2025-10-09T00:07:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312\\\\n2025-10-09T00:07:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_962d3caa-80c1-476f-b30c-547a6b01e312 to 
/host/opt/cni/bin/\\\\n2025-10-09T00:07:05Z [verbose] multus-daemon started\\\\n2025-10-09T00:07:05Z [verbose] Readiness Indicator file check\\\\n2025-10-09T00:07:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T00:07:03Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T00:07:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-69qqx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-vrlxd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.627266 4810 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xpz29" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f74b5f49-e104-4aa7-9472-14d1e706785c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T00:07:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t87mn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T00:07:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xpz29\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:03Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.660511 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.660551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.660562 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.660576 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.660871 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.763462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.763520 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.763539 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.763564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.763582 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.866410 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.866471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.866509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.866548 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.866569 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.970100 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.970159 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.970177 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.970201 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:03 crc kubenswrapper[4810]: I1009 00:08:03.970219 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:03Z","lastTransitionTime":"2025-10-09T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.073471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.073551 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.073570 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.073595 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.073613 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.079053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.079103 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.079120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.079140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.079159 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: E1009 00:08:04.100147 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.105718 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.105759 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.105776 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.105798 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.105815 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: E1009 00:08:04.125853 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.132393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.132471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.132495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.132528 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.132553 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: E1009 00:08:04.155436 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.161152 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.161212 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.161260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.161289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.161307 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: E1009 00:08:04.182439 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.187516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.187573 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.187596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.187627 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.187645 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: E1009 00:08:04.208049 4810 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T00:08:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7658369b-ccb8-43ff-a2da-9dae70b9fe9a\\\",\\\"systemUUID\\\":\\\"d0ba6a9a-46d9-4e3d-9bad-8681de143186\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T00:08:04Z is after 2025-08-24T17:21:41Z" Oct 09 00:08:04 crc kubenswrapper[4810]: E1009 00:08:04.208277 4810 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.210301 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.210371 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.210389 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.210408 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.210424 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.314856 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.314905 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.314917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.314935 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.314950 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.417633 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.417682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.417693 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.417711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.417723 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.520543 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.520602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.520622 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.520647 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.520668 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.623300 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.623363 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.623384 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.623411 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.623428 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.726032 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.726080 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.726098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.726120 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.726137 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.829858 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.829895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.829904 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.829937 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.829949 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.933102 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.933178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.933204 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.933232 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:04 crc kubenswrapper[4810]: I1009 00:08:04.933254 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:04Z","lastTransitionTime":"2025-10-09T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.037424 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.037491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.037509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.037533 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.037550 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.140396 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.140521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.140602 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.140688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.140715 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.244189 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.244242 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.244260 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.244284 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.244300 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.253025 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.253083 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.253095 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:05 crc kubenswrapper[4810]: E1009 00:08:05.253173 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.253195 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:05 crc kubenswrapper[4810]: E1009 00:08:05.253437 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:05 crc kubenswrapper[4810]: E1009 00:08:05.253585 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:05 crc kubenswrapper[4810]: E1009 00:08:05.253649 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.348423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.348476 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.348493 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.348516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.348536 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.452009 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.452066 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.452084 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.452106 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.452124 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.555542 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.555603 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.555621 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.555650 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.555669 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.659610 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.659657 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.659670 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.659689 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.659701 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.762936 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.762999 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.763018 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.763042 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.763063 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.866246 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.866309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.866327 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.866352 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.866371 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.969278 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.969357 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.969390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.969513 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:05 crc kubenswrapper[4810]: I1009 00:08:05.969538 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:05Z","lastTransitionTime":"2025-10-09T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.072281 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.072319 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.072332 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.072349 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.072361 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.175565 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.175658 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.175676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.175699 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.175716 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.278716 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.278777 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.278799 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.278895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.278924 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.382202 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.382296 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.382316 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.382344 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.382362 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.484895 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.484954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.484973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.484998 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.485016 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.588392 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.588468 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.588491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.588517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.588537 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.691483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.691556 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.691574 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.691598 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.691617 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.794860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.794924 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.794941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.794967 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.794984 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.897973 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.898030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.898048 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.898078 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:06 crc kubenswrapper[4810]: I1009 00:08:06.898094 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:06Z","lastTransitionTime":"2025-10-09T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.001723 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.002116 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.002258 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.002541 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.002664 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.038118 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.038266 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.038238788 +0000 UTC m=+148.563877499 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.105140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.105191 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.105205 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.105255 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.105271 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.139299 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.139373 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.139417 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.139466 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139569 4810 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139574 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139603 4810 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139653 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.139630537 +0000 UTC m=+148.665269268 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139696 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.139671888 +0000 UTC m=+148.665310599 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139614 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139726 4810 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139615 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139753 4810 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139765 4810 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139767 4810 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.13975574 +0000 UTC m=+148.665394461 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.139843 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.139803472 +0000 UTC m=+148.665442183 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.207860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.207925 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.207945 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.207968 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.207984 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.253756 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.253809 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.253764 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.253970 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.254083 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.254139 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.254287 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:07 crc kubenswrapper[4810]: E1009 00:08:07.254404 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.310938 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.311362 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.311549 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.311711 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.311892 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.414601 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.415031 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.415183 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.415342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.415485 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.518504 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.518923 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.519035 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.519178 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.519329 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.623442 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.623499 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.623517 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.623540 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.623558 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.726420 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.726476 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.726495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.726521 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.726539 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.829402 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.829932 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.829957 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.829992 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.830017 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.932951 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.933011 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.933030 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.933054 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:07 crc kubenswrapper[4810]: I1009 00:08:07.933070 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:07Z","lastTransitionTime":"2025-10-09T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.036883 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.036954 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.036977 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.037005 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.037030 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.139405 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.139462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.139486 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.139515 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.139538 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.243050 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.243130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.243151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.243194 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.243238 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.346481 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.346546 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.346568 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.346596 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.346617 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.450289 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.450668 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.450754 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.451076 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.451200 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.553907 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.553970 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.553990 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.554017 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.554039 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.656495 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.656563 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.656581 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.656607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.656624 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.760053 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.760113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.760131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.760156 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.760174 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.866835 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.867084 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.867210 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.867339 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.867410 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.970247 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.970598 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.970743 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.970985 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:08 crc kubenswrapper[4810]: I1009 00:08:08.971130 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:08Z","lastTransitionTime":"2025-10-09T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.074231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.074291 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.074309 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.074334 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.074355 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.177436 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.177575 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.177597 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.177673 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.177699 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.253371 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.253418 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.253458 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:09 crc kubenswrapper[4810]: E1009 00:08:09.253602 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.253661 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:09 crc kubenswrapper[4810]: E1009 00:08:09.253687 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:09 crc kubenswrapper[4810]: E1009 00:08:09.253848 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:09 crc kubenswrapper[4810]: E1009 00:08:09.254027 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.280991 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.281050 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.281062 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.281078 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.281089 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.384065 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.384132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.384157 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.384186 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.384206 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.487216 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.487270 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.487287 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.487307 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.487321 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.590483 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.590537 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.590555 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.590579 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.590597 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.693043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.693104 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.693122 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.693142 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.693159 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.799494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.799571 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.799727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.799948 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.800591 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.902753 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.902814 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.902863 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.902888 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:09 crc kubenswrapper[4810]: I1009 00:08:09.902907 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:09Z","lastTransitionTime":"2025-10-09T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.005379 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.005414 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.005423 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.005437 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.005447 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.108043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.108097 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.108113 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.108131 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.108144 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.211756 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.211798 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.211809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.211840 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.211850 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.315446 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.315529 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.315552 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.315586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.315640 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.418607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.418670 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.418688 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.418713 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.418729 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.522425 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.522496 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.522516 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.522545 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.522566 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.626320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.626401 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.626427 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.626459 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.626481 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.729703 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.729800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.729853 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.729889 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.729909 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.833619 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.833682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.833701 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.833727 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.833745 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.936809 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.936899 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.936917 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.936941 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:10 crc kubenswrapper[4810]: I1009 00:08:10.936959 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:10Z","lastTransitionTime":"2025-10-09T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.040368 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.040430 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.040448 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.040471 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.040490 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.143089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.143163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.143187 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.143220 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.143245 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.246937 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.247014 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.247031 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.247062 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.247080 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.253279 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.253331 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:11 crc kubenswrapper[4810]: E1009 00:08:11.253438 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.253517 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.253542 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:11 crc kubenswrapper[4810]: E1009 00:08:11.253925 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:11 crc kubenswrapper[4810]: E1009 00:08:11.253696 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:11 crc kubenswrapper[4810]: E1009 00:08:11.254038 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.350509 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.350587 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.350779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.350815 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.350863 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.455128 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.455193 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.455206 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.455231 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.455247 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.558607 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.558682 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.558705 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.558737 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.558760 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.661860 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.662174 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.662195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.662253 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.662271 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.766016 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.766071 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.766090 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.766112 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.766128 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.869320 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.869394 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.869418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.869447 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.869465 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.972578 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.972652 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.972676 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.972707 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:11 crc kubenswrapper[4810]: I1009 00:08:11.972730 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:11Z","lastTransitionTime":"2025-10-09T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.076494 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.076564 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.076586 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.076614 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.076638 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.179419 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.179463 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.179474 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.179490 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.179502 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.282353 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.282393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.282403 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.282418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.282429 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.385390 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.385432 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.385445 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.385462 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.385478 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.492996 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.493339 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.493352 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.493369 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.493381 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.597280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.597342 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.597364 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.597393 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.597415 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.700800 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.700918 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.700940 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.700969 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.700989 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.803198 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.803261 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.803279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.803303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.803320 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.906137 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.906195 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.906213 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.906235 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:12 crc kubenswrapper[4810]: I1009 00:08:12.906252 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:12Z","lastTransitionTime":"2025-10-09T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.009418 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.009475 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.009491 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.009515 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.009533 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.113084 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.113164 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.113182 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.113207 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.113222 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.216946 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.217023 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.217043 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.217070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.217107 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.253597 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.253711 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.253798 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.253805 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:13 crc kubenswrapper[4810]: E1009 00:08:13.253798 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:13 crc kubenswrapper[4810]: E1009 00:08:13.253988 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:13 crc kubenswrapper[4810]: E1009 00:08:13.254243 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:13 crc kubenswrapper[4810]: E1009 00:08:13.255011 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.256513 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:08:13 crc kubenswrapper[4810]: E1009 00:08:13.256897 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.315809 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=69.315776663 podStartE2EDuration="1m9.315776663s" podCreationTimestamp="2025-10-09 00:07:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.290618421 +0000 UTC m=+90.816257132" watchObservedRunningTime="2025-10-09 00:08:13.315776663 +0000 UTC m=+90.841415404" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.323217 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.323280 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.323305 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.323333 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.323355 4810 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.335318 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-bw4pj" podStartSLOduration=71.335293045 podStartE2EDuration="1m11.335293045s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.334813082 +0000 UTC m=+90.860451843" watchObservedRunningTime="2025-10-09 00:08:13.335293045 +0000 UTC m=+90.860931786" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.371994 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podStartSLOduration=71.371975653 podStartE2EDuration="1m11.371975653s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.370742908 +0000 UTC m=+90.896381629" watchObservedRunningTime="2025-10-09 00:08:13.371975653 +0000 UTC m=+90.897614364" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.402708 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=71.402688062 podStartE2EDuration="1m11.402688062s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.402467706 +0000 UTC m=+90.928106417" watchObservedRunningTime="2025-10-09 00:08:13.402688062 +0000 UTC m=+90.928326773" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.425098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.425136 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.425147 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.425163 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.425175 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.443417 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.443396524 podStartE2EDuration="38.443396524s" podCreationTimestamp="2025-10-09 00:07:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.416965706 +0000 UTC m=+90.942604417" watchObservedRunningTime="2025-10-09 00:08:13.443396524 +0000 UTC m=+90.969035225" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.443940 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=68.443934129 podStartE2EDuration="1m8.443934129s" podCreationTimestamp="2025-10-09 00:07:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.442262112 +0000 UTC m=+90.967900813" watchObservedRunningTime="2025-10-09 00:08:13.443934129 +0000 UTC m=+90.969572830" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.479771 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-5qcmj" podStartSLOduration=71.479753413 podStartE2EDuration="1m11.479753413s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.466330533 +0000 UTC m=+90.991969234" watchObservedRunningTime="2025-10-09 00:08:13.479753413 +0000 UTC m=+91.005392114" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.526894 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.526934 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.526943 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.526960 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.526973 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.537436 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-vrlxd" podStartSLOduration=71.537421405 podStartE2EDuration="1m11.537421405s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.536573941 +0000 UTC m=+91.062212652" watchObservedRunningTime="2025-10-09 00:08:13.537421405 +0000 UTC m=+91.063060116" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.583173 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=16.583152889 podStartE2EDuration="16.583152889s" podCreationTimestamp="2025-10-09 00:07:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.564936993 +0000 UTC m=+91.090575694" watchObservedRunningTime="2025-10-09 00:08:13.583152889 +0000 UTC m=+91.108791590" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.583958 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-lzf84" podStartSLOduration=71.583949811 podStartE2EDuration="1m11.583949811s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.583007525 +0000 UTC m=+91.108646246" watchObservedRunningTime="2025-10-09 00:08:13.583949811 +0000 UTC m=+91.109588522" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.629051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.629104 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.629125 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.629151 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.629174 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.731236 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.731279 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.731290 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.731306 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.731317 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.834027 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.834089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.834107 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.834130 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.834148 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.937055 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.937105 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.937115 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.937132 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:13 crc kubenswrapper[4810]: I1009 00:08:13.937143 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:13Z","lastTransitionTime":"2025-10-09T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.039648 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.039725 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.039749 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.039779 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.039801 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:14Z","lastTransitionTime":"2025-10-09T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.143037 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.143098 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.143114 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.143140 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.143158 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:14Z","lastTransitionTime":"2025-10-09T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.245987 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.246051 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.246070 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.246094 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.246114 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:14Z","lastTransitionTime":"2025-10-09T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.349089 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.349146 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.349165 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.349188 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.349206 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:14Z","lastTransitionTime":"2025-10-09T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.452335 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.452417 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.452440 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.452470 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.452495 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:14Z","lastTransitionTime":"2025-10-09T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.529229 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.529303 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.529322 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.529348 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.529371 4810 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T00:08:14Z","lastTransitionTime":"2025-10-09T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.609949 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xsv9t" podStartSLOduration=71.609918502 podStartE2EDuration="1m11.609918502s" podCreationTimestamp="2025-10-09 00:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:13.59697687 +0000 UTC m=+91.122615571" watchObservedRunningTime="2025-10-09 00:08:14.609918502 +0000 UTC m=+92.135557233" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.611773 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9"] Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.612396 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.614741 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.614972 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.615407 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.618767 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.724355 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/487c1408-40c3-4ff6-ad9a-753cc4911379-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.724429 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/487c1408-40c3-4ff6-ad9a-753cc4911379-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.724496 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/487c1408-40c3-4ff6-ad9a-753cc4911379-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.724652 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/487c1408-40c3-4ff6-ad9a-753cc4911379-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.724756 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/487c1408-40c3-4ff6-ad9a-753cc4911379-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.825487 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/487c1408-40c3-4ff6-ad9a-753cc4911379-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.825573 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/487c1408-40c3-4ff6-ad9a-753cc4911379-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.825602 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/487c1408-40c3-4ff6-ad9a-753cc4911379-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.825612 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/487c1408-40c3-4ff6-ad9a-753cc4911379-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.826129 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/487c1408-40c3-4ff6-ad9a-753cc4911379-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.826209 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/487c1408-40c3-4ff6-ad9a-753cc4911379-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.826286 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/487c1408-40c3-4ff6-ad9a-753cc4911379-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 
crc kubenswrapper[4810]: I1009 00:08:14.827216 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/487c1408-40c3-4ff6-ad9a-753cc4911379-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.835976 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/487c1408-40c3-4ff6-ad9a-753cc4911379-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.855068 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/487c1408-40c3-4ff6-ad9a-753cc4911379-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bdwk9\" (UID: \"487c1408-40c3-4ff6-ad9a-753cc4911379\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:14 crc kubenswrapper[4810]: I1009 00:08:14.935306 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.252905 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.252987 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:15 crc kubenswrapper[4810]: E1009 00:08:15.253099 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.252922 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:15 crc kubenswrapper[4810]: E1009 00:08:15.253268 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.253321 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:15 crc kubenswrapper[4810]: E1009 00:08:15.253399 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:15 crc kubenswrapper[4810]: E1009 00:08:15.253476 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.814909 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" event={"ID":"487c1408-40c3-4ff6-ad9a-753cc4911379","Type":"ContainerStarted","Data":"1b0281734fdd0bbf6ce85eb1fc46b00d9457dd0c22456a7149231e0ed5d5c219"} Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.814980 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" event={"ID":"487c1408-40c3-4ff6-ad9a-753cc4911379","Type":"ContainerStarted","Data":"5c19f206a950184a9a4b2543015fc19218bd5003e3566507cf8308df97f5935d"} Oct 09 00:08:15 crc kubenswrapper[4810]: I1009 00:08:15.836137 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bdwk9" podStartSLOduration=73.836115728 podStartE2EDuration="1m13.836115728s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:15.834652996 +0000 UTC m=+93.360291727" watchObservedRunningTime="2025-10-09 00:08:15.836115728 +0000 UTC m=+93.361754489" Oct 09 00:08:17 crc kubenswrapper[4810]: I1009 00:08:17.254063 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:17 crc kubenswrapper[4810]: I1009 00:08:17.254093 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:17 crc kubenswrapper[4810]: I1009 00:08:17.254183 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:17 crc kubenswrapper[4810]: I1009 00:08:17.254217 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:17 crc kubenswrapper[4810]: E1009 00:08:17.254339 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:17 crc kubenswrapper[4810]: E1009 00:08:17.254648 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:17 crc kubenswrapper[4810]: E1009 00:08:17.254770 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:17 crc kubenswrapper[4810]: E1009 00:08:17.254925 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:19 crc kubenswrapper[4810]: I1009 00:08:19.253188 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:19 crc kubenswrapper[4810]: E1009 00:08:19.253691 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:19 crc kubenswrapper[4810]: I1009 00:08:19.253224 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:19 crc kubenswrapper[4810]: E1009 00:08:19.253891 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:19 crc kubenswrapper[4810]: I1009 00:08:19.253759 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:19 crc kubenswrapper[4810]: E1009 00:08:19.253978 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:19 crc kubenswrapper[4810]: I1009 00:08:19.254128 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:19 crc kubenswrapper[4810]: E1009 00:08:19.254284 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:21 crc kubenswrapper[4810]: I1009 00:08:21.203121 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:21 crc kubenswrapper[4810]: E1009 00:08:21.203307 4810 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:08:21 crc kubenswrapper[4810]: E1009 00:08:21.203406 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs podName:f74b5f49-e104-4aa7-9472-14d1e706785c nodeName:}" failed. No retries permitted until 2025-10-09 00:09:25.203383594 +0000 UTC m=+162.729022305 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs") pod "network-metrics-daemon-xpz29" (UID: "f74b5f49-e104-4aa7-9472-14d1e706785c") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 00:08:21 crc kubenswrapper[4810]: I1009 00:08:21.253965 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:21 crc kubenswrapper[4810]: I1009 00:08:21.254013 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:21 crc kubenswrapper[4810]: I1009 00:08:21.254018 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:21 crc kubenswrapper[4810]: I1009 00:08:21.253980 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:21 crc kubenswrapper[4810]: E1009 00:08:21.254163 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:21 crc kubenswrapper[4810]: E1009 00:08:21.254250 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:21 crc kubenswrapper[4810]: E1009 00:08:21.254401 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:21 crc kubenswrapper[4810]: E1009 00:08:21.254503 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:23 crc kubenswrapper[4810]: I1009 00:08:23.253541 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:23 crc kubenswrapper[4810]: I1009 00:08:23.256217 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:23 crc kubenswrapper[4810]: I1009 00:08:23.256291 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:23 crc kubenswrapper[4810]: E1009 00:08:23.256398 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:23 crc kubenswrapper[4810]: I1009 00:08:23.256654 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:23 crc kubenswrapper[4810]: E1009 00:08:23.256795 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:23 crc kubenswrapper[4810]: E1009 00:08:23.257144 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:23 crc kubenswrapper[4810]: E1009 00:08:23.257394 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:25 crc kubenswrapper[4810]: I1009 00:08:25.253779 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:25 crc kubenswrapper[4810]: I1009 00:08:25.253886 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:25 crc kubenswrapper[4810]: I1009 00:08:25.253913 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:25 crc kubenswrapper[4810]: I1009 00:08:25.253989 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:25 crc kubenswrapper[4810]: E1009 00:08:25.254050 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:25 crc kubenswrapper[4810]: E1009 00:08:25.254155 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:25 crc kubenswrapper[4810]: E1009 00:08:25.254248 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:25 crc kubenswrapper[4810]: E1009 00:08:25.254330 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:27 crc kubenswrapper[4810]: I1009 00:08:27.253735 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:27 crc kubenswrapper[4810]: E1009 00:08:27.253917 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:27 crc kubenswrapper[4810]: I1009 00:08:27.253754 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:27 crc kubenswrapper[4810]: I1009 00:08:27.254081 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:27 crc kubenswrapper[4810]: I1009 00:08:27.254676 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:27 crc kubenswrapper[4810]: E1009 00:08:27.255091 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:27 crc kubenswrapper[4810]: E1009 00:08:27.255081 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:27 crc kubenswrapper[4810]: I1009 00:08:27.255186 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:08:27 crc kubenswrapper[4810]: E1009 00:08:27.254711 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:27 crc kubenswrapper[4810]: E1009 00:08:27.255681 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:08:29 crc kubenswrapper[4810]: I1009 00:08:29.253339 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:29 crc kubenswrapper[4810]: I1009 00:08:29.253435 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:29 crc kubenswrapper[4810]: E1009 00:08:29.254292 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:29 crc kubenswrapper[4810]: I1009 00:08:29.253483 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:29 crc kubenswrapper[4810]: E1009 00:08:29.254403 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:29 crc kubenswrapper[4810]: E1009 00:08:29.254131 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:29 crc kubenswrapper[4810]: I1009 00:08:29.253476 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:29 crc kubenswrapper[4810]: E1009 00:08:29.254781 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:31 crc kubenswrapper[4810]: I1009 00:08:31.252903 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:31 crc kubenswrapper[4810]: I1009 00:08:31.252981 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:31 crc kubenswrapper[4810]: I1009 00:08:31.253193 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:31 crc kubenswrapper[4810]: E1009 00:08:31.253612 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:31 crc kubenswrapper[4810]: I1009 00:08:31.253690 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:31 crc kubenswrapper[4810]: E1009 00:08:31.253901 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:31 crc kubenswrapper[4810]: E1009 00:08:31.254002 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:31 crc kubenswrapper[4810]: E1009 00:08:31.254258 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:33 crc kubenswrapper[4810]: I1009 00:08:33.253056 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:33 crc kubenswrapper[4810]: I1009 00:08:33.255411 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:33 crc kubenswrapper[4810]: E1009 00:08:33.255415 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:33 crc kubenswrapper[4810]: I1009 00:08:33.255467 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:33 crc kubenswrapper[4810]: I1009 00:08:33.255492 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:33 crc kubenswrapper[4810]: E1009 00:08:33.255652 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:33 crc kubenswrapper[4810]: E1009 00:08:33.255723 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:33 crc kubenswrapper[4810]: E1009 00:08:33.255900 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:35 crc kubenswrapper[4810]: I1009 00:08:35.253882 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:35 crc kubenswrapper[4810]: I1009 00:08:35.253810 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:35 crc kubenswrapper[4810]: I1009 00:08:35.253951 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:35 crc kubenswrapper[4810]: I1009 00:08:35.253902 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:35 crc kubenswrapper[4810]: E1009 00:08:35.254046 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:35 crc kubenswrapper[4810]: E1009 00:08:35.254159 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:35 crc kubenswrapper[4810]: E1009 00:08:35.254321 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:35 crc kubenswrapper[4810]: E1009 00:08:35.254406 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:36 crc kubenswrapper[4810]: I1009 00:08:36.901235 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/1.log" Oct 09 00:08:36 crc kubenswrapper[4810]: I1009 00:08:36.901796 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/0.log" Oct 09 00:08:36 crc kubenswrapper[4810]: I1009 00:08:36.901936 4810 generic.go:334] "Generic (PLEG): container finished" podID="8e9fffc1-16a6-4108-978b-6e85bdfd9c4f" containerID="58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136" exitCode=1 Oct 09 00:08:36 crc kubenswrapper[4810]: I1009 00:08:36.901999 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerDied","Data":"58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136"} Oct 09 00:08:36 crc kubenswrapper[4810]: I1009 00:08:36.902055 4810 scope.go:117] "RemoveContainer" containerID="f445bcb2e7cb69ae05127c1b3a2bd742afd997c95048e16dfda8386f8f7db095" Oct 09 00:08:36 crc kubenswrapper[4810]: I1009 00:08:36.902860 4810 scope.go:117] "RemoveContainer" containerID="58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136" Oct 09 00:08:36 crc kubenswrapper[4810]: E1009 00:08:36.903338 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-vrlxd_openshift-multus(8e9fffc1-16a6-4108-978b-6e85bdfd9c4f)\"" pod="openshift-multus/multus-vrlxd" podUID="8e9fffc1-16a6-4108-978b-6e85bdfd9c4f" Oct 09 00:08:37 crc kubenswrapper[4810]: I1009 00:08:37.253022 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:37 crc kubenswrapper[4810]: I1009 00:08:37.253060 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:37 crc kubenswrapper[4810]: E1009 00:08:37.253306 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:37 crc kubenswrapper[4810]: I1009 00:08:37.253344 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:37 crc kubenswrapper[4810]: I1009 00:08:37.253430 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:37 crc kubenswrapper[4810]: E1009 00:08:37.253462 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:37 crc kubenswrapper[4810]: E1009 00:08:37.253562 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:37 crc kubenswrapper[4810]: E1009 00:08:37.253808 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:37 crc kubenswrapper[4810]: I1009 00:08:37.908363 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/1.log" Oct 09 00:08:38 crc kubenswrapper[4810]: I1009 00:08:38.253845 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:08:38 crc kubenswrapper[4810]: E1009 00:08:38.254026 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-sd2lj_openshift-ovn-kubernetes(bfb7a412-4af9-4aa0-a3e8-d46dab040385)\"" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" Oct 09 00:08:39 crc kubenswrapper[4810]: I1009 00:08:39.252993 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:39 crc kubenswrapper[4810]: I1009 00:08:39.253065 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:39 crc kubenswrapper[4810]: I1009 00:08:39.253092 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:39 crc kubenswrapper[4810]: I1009 00:08:39.253105 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:39 crc kubenswrapper[4810]: E1009 00:08:39.253248 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:39 crc kubenswrapper[4810]: E1009 00:08:39.253388 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:39 crc kubenswrapper[4810]: E1009 00:08:39.253632 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:39 crc kubenswrapper[4810]: E1009 00:08:39.253750 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:41 crc kubenswrapper[4810]: I1009 00:08:41.253917 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:41 crc kubenswrapper[4810]: I1009 00:08:41.253957 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:41 crc kubenswrapper[4810]: I1009 00:08:41.254000 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:41 crc kubenswrapper[4810]: E1009 00:08:41.254150 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:41 crc kubenswrapper[4810]: I1009 00:08:41.254191 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:41 crc kubenswrapper[4810]: E1009 00:08:41.254357 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:41 crc kubenswrapper[4810]: E1009 00:08:41.254600 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:41 crc kubenswrapper[4810]: E1009 00:08:41.254658 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:43 crc kubenswrapper[4810]: I1009 00:08:43.253517 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:43 crc kubenswrapper[4810]: I1009 00:08:43.254467 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:43 crc kubenswrapper[4810]: I1009 00:08:43.254475 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:43 crc kubenswrapper[4810]: E1009 00:08:43.254582 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:43 crc kubenswrapper[4810]: I1009 00:08:43.254665 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:43 crc kubenswrapper[4810]: E1009 00:08:43.254870 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:43 crc kubenswrapper[4810]: E1009 00:08:43.254919 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:43 crc kubenswrapper[4810]: E1009 00:08:43.254968 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:43 crc kubenswrapper[4810]: E1009 00:08:43.270073 4810 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 09 00:08:43 crc kubenswrapper[4810]: E1009 00:08:43.349688 4810 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 00:08:45 crc kubenswrapper[4810]: I1009 00:08:45.253458 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:45 crc kubenswrapper[4810]: I1009 00:08:45.253572 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:45 crc kubenswrapper[4810]: E1009 00:08:45.253629 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:45 crc kubenswrapper[4810]: E1009 00:08:45.253781 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:45 crc kubenswrapper[4810]: I1009 00:08:45.253932 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:45 crc kubenswrapper[4810]: E1009 00:08:45.254031 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:45 crc kubenswrapper[4810]: I1009 00:08:45.254116 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:45 crc kubenswrapper[4810]: E1009 00:08:45.254234 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:47 crc kubenswrapper[4810]: I1009 00:08:47.253691 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:47 crc kubenswrapper[4810]: I1009 00:08:47.253704 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:47 crc kubenswrapper[4810]: I1009 00:08:47.253666 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:47 crc kubenswrapper[4810]: I1009 00:08:47.253731 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:47 crc kubenswrapper[4810]: E1009 00:08:47.253960 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:47 crc kubenswrapper[4810]: E1009 00:08:47.254084 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:47 crc kubenswrapper[4810]: E1009 00:08:47.254220 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:47 crc kubenswrapper[4810]: E1009 00:08:47.254305 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:48 crc kubenswrapper[4810]: E1009 00:08:48.351382 4810 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 00:08:49 crc kubenswrapper[4810]: I1009 00:08:49.252795 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:49 crc kubenswrapper[4810]: I1009 00:08:49.252882 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:49 crc kubenswrapper[4810]: I1009 00:08:49.252930 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:49 crc kubenswrapper[4810]: E1009 00:08:49.253007 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:49 crc kubenswrapper[4810]: I1009 00:08:49.253070 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:49 crc kubenswrapper[4810]: E1009 00:08:49.253165 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:49 crc kubenswrapper[4810]: E1009 00:08:49.253256 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:49 crc kubenswrapper[4810]: E1009 00:08:49.253386 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:50 crc kubenswrapper[4810]: I1009 00:08:50.254954 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:08:50 crc kubenswrapper[4810]: I1009 00:08:50.954370 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/3.log" Oct 09 00:08:50 crc kubenswrapper[4810]: I1009 00:08:50.957074 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerStarted","Data":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} Oct 09 00:08:50 crc kubenswrapper[4810]: I1009 00:08:50.957483 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.006993 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podStartSLOduration=109.006971011 podStartE2EDuration="1m49.006971011s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:08:51.00586406 +0000 UTC m=+128.531502791" watchObservedRunningTime="2025-10-09 00:08:51.006971011 +0000 UTC m=+128.532609722" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.253030 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:51 crc kubenswrapper[4810]: E1009 00:08:51.253215 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.253453 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.253527 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.253535 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:51 crc kubenswrapper[4810]: E1009 00:08:51.253806 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.253877 4810 scope.go:117] "RemoveContainer" containerID="58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136" Oct 09 00:08:51 crc kubenswrapper[4810]: E1009 00:08:51.253917 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:51 crc kubenswrapper[4810]: E1009 00:08:51.254056 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.439076 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-xpz29"] Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.963186 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/1.log" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.963286 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:51 crc kubenswrapper[4810]: I1009 00:08:51.963287 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerStarted","Data":"2a35eb03a81dc3bf579210892f31a0deddf8aadd9d38af46a3613c7c5b5bec42"} Oct 09 00:08:51 crc kubenswrapper[4810]: E1009 00:08:51.963468 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:53 crc kubenswrapper[4810]: I1009 00:08:53.253165 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:53 crc kubenswrapper[4810]: I1009 00:08:53.253231 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:53 crc kubenswrapper[4810]: I1009 00:08:53.253387 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:53 crc kubenswrapper[4810]: E1009 00:08:53.254876 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:53 crc kubenswrapper[4810]: E1009 00:08:53.255014 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:53 crc kubenswrapper[4810]: E1009 00:08:53.255233 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:53 crc kubenswrapper[4810]: I1009 00:08:53.254688 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:53 crc kubenswrapper[4810]: E1009 00:08:53.255904 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:53 crc kubenswrapper[4810]: E1009 00:08:53.352245 4810 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 00:08:55 crc kubenswrapper[4810]: I1009 00:08:55.253843 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:55 crc kubenswrapper[4810]: I1009 00:08:55.253930 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:55 crc kubenswrapper[4810]: E1009 00:08:55.254031 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:55 crc kubenswrapper[4810]: I1009 00:08:55.254076 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:55 crc kubenswrapper[4810]: E1009 00:08:55.254207 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:55 crc kubenswrapper[4810]: I1009 00:08:55.254219 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:55 crc kubenswrapper[4810]: E1009 00:08:55.254365 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:55 crc kubenswrapper[4810]: E1009 00:08:55.254480 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:57 crc kubenswrapper[4810]: I1009 00:08:57.253642 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:57 crc kubenswrapper[4810]: E1009 00:08:57.254197 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 00:08:57 crc kubenswrapper[4810]: I1009 00:08:57.253668 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:57 crc kubenswrapper[4810]: E1009 00:08:57.254328 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 00:08:57 crc kubenswrapper[4810]: I1009 00:08:57.253716 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:57 crc kubenswrapper[4810]: E1009 00:08:57.254427 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 00:08:57 crc kubenswrapper[4810]: I1009 00:08:57.253657 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:57 crc kubenswrapper[4810]: E1009 00:08:57.254539 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xpz29" podUID="f74b5f49-e104-4aa7-9472-14d1e706785c" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.253716 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.255026 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.255080 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.255095 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.257286 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.257477 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.258018 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.258137 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.258168 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 09 00:08:59 crc kubenswrapper[4810]: I1009 00:08:59.258448 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 09 00:09:04 crc kubenswrapper[4810]: I1009 00:09:04.733192 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.822412 4810 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.864272 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mzlgx"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.865084 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.865237 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ch5jb"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.865944 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.871729 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.871790 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.871943 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.872072 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.872183 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.872354 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.871765 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.872763 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.872791 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.873403 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.873625 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.873745 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.874061 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.885769 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-config\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.885909 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-image-import-ca\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.885956 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7d4d7acd-916e-45d1-8d4d-990c386d806d-node-pullsecrets\") pod \"apiserver-76f77b778f-ch5jb\" (UID: 
\"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886005 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-etcd-serving-ca\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886073 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-audit\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886119 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-encryption-config\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886174 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d4d7acd-916e-45d1-8d4d-990c386d806d-audit-dir\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886218 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lr8k\" (UniqueName: \"kubernetes.io/projected/7d4d7acd-916e-45d1-8d4d-990c386d806d-kube-api-access-7lr8k\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886259 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-client-ca\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886315 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-config\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886355 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-etcd-client\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886396 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-serving-cert\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886462 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.886994 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.889072 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.889215 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.894085 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.894609 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.894707 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.897877 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29332800-wtmbd"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.898598 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.899788 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.900068 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.900281 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.900521 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.900540 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.900559 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.901026 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.902519 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.903335 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.903865 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.904252 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.904636 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.904683 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.905115 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.908518 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.909192 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-tmpb8"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.909725 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.909885 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lsgnf"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.910432 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.914207 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.921235 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-685td"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.941011 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.941315 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.941784 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942091 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942143 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942180 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942234 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942307 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942348 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942413 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942420 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942531 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942417 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942547 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 09 00:09:05 crc 
kubenswrapper[4810]: I1009 00:09:05.942106 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.942748 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.943407 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.944675 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-5jswd"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.945445 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.945790 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.946146 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.946367 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.952777 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-thprd"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.953095 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.953317 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-trv84"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.953519 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-78fsr"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.954022 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.957004 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.957067 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.957204 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.957269 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.957961 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.957004 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.958007 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.958199 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.958308 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.958353 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.960675 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.960838 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.960941 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.961001 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.961320 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.961357 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.961639 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.963386 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vfwx7"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.963784 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.964117 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.965550 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.965963 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.966167 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.966434 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.966972 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.967069 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.967338 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.967552 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.967870 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.968040 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.968785 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.970799 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.971416 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.971994 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.972379 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.972540 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.973716 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.972424 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.973942 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.974316 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.974678 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.976909 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.977043 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.977812 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.978506 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.981572 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982488 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982498 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982573 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982656 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982726 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982759 4810 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982794 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982874 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74"] Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982896 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 09 00:09:05 crc kubenswrapper[4810]: I1009 00:09:05.982905 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.010564 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.011567 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.011744 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.012103 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.016994 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.017041 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dhpq4"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018488 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018625 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-audit-policies\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018659 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/06f5797c-1640-413d-882d-957c10daea4c-metrics-tls\") pod \"dns-operator-744455d44c-thprd\" (UID: \"06f5797c-1640-413d-882d-957c10daea4c\") " pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018686 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-etcd-serving-ca\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06fdb80d-d467-44d7-9859-efcc2fff59e6-trusted-ca\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018801 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dvkw\" (UniqueName: \"kubernetes.io/projected/9592f7ec-a684-4b31-97b3-32c3439a8ee0-kube-api-access-2dvkw\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018835 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rm7g\" (UniqueName: \"kubernetes.io/projected/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-kube-api-access-9rm7g\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018854 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7c410054-569a-4d33-8e55-87f28ba661f6-audit-dir\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018872 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/06fdb80d-d467-44d7-9859-efcc2fff59e6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018900 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-audit\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018920 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-encryption-config\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018941 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d4d7acd-916e-45d1-8d4d-990c386d806d-audit-dir\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018962 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf7w2\" (UniqueName: \"kubernetes.io/projected/456f2042-395a-424b-80c1-3bc40286210d-kube-api-access-mf7w2\") pod \"control-plane-machine-set-operator-78cbb6b69f-jp7tg\" (UID: \"456f2042-395a-424b-80c1-3bc40286210d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018978 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-serving-cert\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.018997 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lr8k\" (UniqueName: \"kubernetes.io/projected/7d4d7acd-916e-45d1-8d4d-990c386d806d-kube-api-access-7lr8k\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019015 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-client-ca\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019041 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-config\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019057 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-etcd-client\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019076 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9592f7ec-a684-4b31-97b3-32c3439a8ee0-serving-cert\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019095 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdv9k\" (UniqueName: \"kubernetes.io/projected/e1bff448-76f4-4bda-a346-9ba668d4d05c-kube-api-access-sdv9k\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019110 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/06fdb80d-d467-44d7-9859-efcc2fff59e6-metrics-tls\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019129 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-serving-cert\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019147 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019168 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1bff448-76f4-4bda-a346-9ba668d4d05c-serving-cert\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019186 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019218 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/456f2042-395a-424b-80c1-3bc40286210d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-jp7tg\" (UID: \"456f2042-395a-424b-80c1-3bc40286210d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.019320 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.022019 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.023704 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-audit\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.034173 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.034468 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6nbw\" (UniqueName: \"kubernetes.io/projected/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-kube-api-access-t6nbw\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.035109 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7d4d7acd-916e-45d1-8d4d-990c386d806d-audit-dir\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.035253 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.035296 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-config\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.035384 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.035436 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.035471 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-etcd-client\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.041702 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fqzl\" (UniqueName: \"kubernetes.io/projected/06f5797c-1640-413d-882d-957c10daea4c-kube-api-access-5fqzl\") pod \"dns-operator-744455d44c-thprd\" (UID: \"06f5797c-1640-413d-882d-957c10daea4c\") " pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.041746 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vv4n\" (UniqueName: \"kubernetes.io/projected/7c410054-569a-4d33-8e55-87f28ba661f6-kube-api-access-6vv4n\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.041790 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26mdw\" (UniqueName: \"kubernetes.io/projected/06fdb80d-d467-44d7-9859-efcc2fff59e6-kube-api-access-26mdw\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.041857 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-config\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.041930 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-service-ca-bundle\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.041955 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-image-import-ca\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.042035 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7d4d7acd-916e-45d1-8d4d-990c386d806d-node-pullsecrets\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.042065 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-config\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.042090 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.042134 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-encryption-config\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.036940 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-config\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.037551 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-etcd-serving-ca\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.037677 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-client-ca\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.037698 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " 
pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.044219 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-etcd-client\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.044985 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-config\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.037926 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.045032 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7d4d7acd-916e-45d1-8d4d-990c386d806d-image-import-ca\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.045074 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.042240 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.045156 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7d4d7acd-916e-45d1-8d4d-990c386d806d-node-pullsecrets\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.042495 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.038199 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.044980 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.045021 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.045698 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-encryption-config\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.047402 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.047490 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.049085 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.049189 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.049526 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d4d7acd-916e-45d1-8d4d-990c386d806d-serving-cert\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.049719 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.051081 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.051562 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.054112 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.054779 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.056806 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.057286 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.059664 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.063087 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.063506 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-blbp4"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.064314 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.064919 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gwvst"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.065586 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.066158 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.067448 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.069177 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-w5fvk"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.070092 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.071649 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.072535 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.072636 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.073265 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.073702 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.074241 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.074872 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.075383 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.075777 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-z2zxv"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.076309 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.076675 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.077169 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.080393 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6hchn"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.081886 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.083879 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.096654 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.097982 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.098768 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.098802 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-685td"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.098836 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.098864 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ch5jb"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.099169 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.103174 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-thprd"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.107938 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-trv84"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.108219 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.109355 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-tmpb8"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.112294 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-kvfcw"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.114398 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.114540 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.118626 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-w5fvk"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.120482 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dhpq4"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.121440 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lsgnf"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.122904 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.124027 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29332800-wtmbd"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.125453 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gwvst"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.127722 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.128697 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vfwx7"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.130154 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-5jswd"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.131573 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.132778 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.134208 4810 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.135177 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.136289 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.137319 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.138597 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.139626 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.141222 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.142518 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.142902 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rm7g\" (UniqueName: \"kubernetes.io/projected/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-kube-api-access-9rm7g\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.142926 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7c410054-569a-4d33-8e55-87f28ba661f6-audit-dir\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.142946 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/06fdb80d-d467-44d7-9859-efcc2fff59e6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.142973 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf7w2\" (UniqueName: \"kubernetes.io/projected/456f2042-395a-424b-80c1-3bc40286210d-kube-api-access-mf7w2\") pod \"control-plane-machine-set-operator-78cbb6b69f-jp7tg\" (UID: \"456f2042-395a-424b-80c1-3bc40286210d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.142991 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-serving-cert\") pod 
\"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143021 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9592f7ec-a684-4b31-97b3-32c3439a8ee0-serving-cert\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143035 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7c410054-569a-4d33-8e55-87f28ba661f6-audit-dir\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143040 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdv9k\" (UniqueName: \"kubernetes.io/projected/e1bff448-76f4-4bda-a346-9ba668d4d05c-kube-api-access-sdv9k\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143097 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/06fdb80d-d467-44d7-9859-efcc2fff59e6-metrics-tls\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143120 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143138 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1bff448-76f4-4bda-a346-9ba668d4d05c-serving-cert\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143160 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143197 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/456f2042-395a-424b-80c1-3bc40286210d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-jp7tg\" (UID: \"456f2042-395a-424b-80c1-3bc40286210d\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143215 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143249 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6nbw\" (UniqueName: \"kubernetes.io/projected/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-kube-api-access-t6nbw\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143268 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-config\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143286 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143302 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143319 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-etcd-client\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143335 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fqzl\" (UniqueName: \"kubernetes.io/projected/06f5797c-1640-413d-882d-957c10daea4c-kube-api-access-5fqzl\") pod \"dns-operator-744455d44c-thprd\" (UID: \"06f5797c-1640-413d-882d-957c10daea4c\") " pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:06 crc 
kubenswrapper[4810]: I1009 00:09:06.143350 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vv4n\" (UniqueName: \"kubernetes.io/projected/7c410054-569a-4d33-8e55-87f28ba661f6-kube-api-access-6vv4n\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143368 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26mdw\" (UniqueName: \"kubernetes.io/projected/06fdb80d-d467-44d7-9859-efcc2fff59e6-kube-api-access-26mdw\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143415 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-service-ca-bundle\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143435 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-config\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143451 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143468 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-encryption-config\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143485 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-audit-policies\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143503 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/06f5797c-1640-413d-882d-957c10daea4c-metrics-tls\") pod \"dns-operator-744455d44c-thprd\" (UID: \"06f5797c-1640-413d-882d-957c10daea4c\") " pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143519 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/06fdb80d-d467-44d7-9859-efcc2fff59e6-trusted-ca\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143547 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dvkw\" (UniqueName: \"kubernetes.io/projected/9592f7ec-a684-4b31-97b3-32c3439a8ee0-kube-api-access-2dvkw\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.143857 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.145679 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-service-ca-bundle\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.145728 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-config\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.145800 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9592f7ec-a684-4b31-97b3-32c3439a8ee0-serving-cert\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.146051 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.146505 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.146939 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-config\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.147765 4810 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"console-dockercfg-f62pw" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.148460 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1bff448-76f4-4bda-a346-9ba668d4d05c-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.148493 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.148675 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/06fdb80d-d467-44d7-9859-efcc2fff59e6-trusted-ca\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.148844 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-g8p2l"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.148855 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/456f2042-395a-424b-80c1-3bc40286210d-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-jp7tg\" (UID: \"456f2042-395a-424b-80c1-3bc40286210d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.149416 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/06fdb80d-d467-44d7-9859-efcc2fff59e6-metrics-tls\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.150024 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1bff448-76f4-4bda-a346-9ba668d4d05c-serving-cert\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.150381 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.150765 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/06f5797c-1640-413d-882d-957c10daea4c-metrics-tls\") pod \"dns-operator-744455d44c-thprd\" (UID: \"06f5797c-1640-413d-882d-957c10daea4c\") " pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.151062 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-blbp4"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.152540 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.153131 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.153997 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.155547 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-q7x6z"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.156417 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.157888 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mzlgx"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.159527 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.161007 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6hchn"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.162618 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-z2zxv"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.163992 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.165381 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.166961 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-g8p2l"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.167700 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.168603 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.169885 4810 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-q7x6z"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.170890 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-zgvxk"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.171590 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.171698 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-zgvxk"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.192947 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.207844 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.229342 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.267117 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.267630 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lr8k\" (UniqueName: \"kubernetes.io/projected/7d4d7acd-916e-45d1-8d4d-990c386d806d-kube-api-access-7lr8k\") pod \"apiserver-76f77b778f-ch5jb\" (UID: \"7d4d7acd-916e-45d1-8d4d-990c386d806d\") " pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.287335 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.306966 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.327643 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.338763 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-etcd-client\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.349003 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.357392 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-encryption-config\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.366930 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.377888 4810 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c410054-569a-4d33-8e55-87f28ba661f6-serving-cert\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.387010 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.406987 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.414675 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.427625 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.436553 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-audit-policies\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.449660 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.457269 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c410054-569a-4d33-8e55-87f28ba661f6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.467123 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.487762 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.500585 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.507400 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.527767 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.548884 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.568757 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.588197 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.608636 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.628876 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.649397 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.667998 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.687860 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.708160 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.727398 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.746417 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ch5jb"] Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.748095 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 09 00:09:06 crc kubenswrapper[4810]: W1009 00:09:06.755323 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d4d7acd_916e_45d1_8d4d_990c386d806d.slice/crio-abf1442a05ad7da1c4b1792849256dd16b879c983b5fe5a6482a12539fb85de7 WatchSource:0}: Error finding container abf1442a05ad7da1c4b1792849256dd16b879c983b5fe5a6482a12539fb85de7: Status 404 returned error can't find the container with id abf1442a05ad7da1c4b1792849256dd16b879c983b5fe5a6482a12539fb85de7 Oct 09 
00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.767525 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.787535 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.807774 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.828371 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.847566 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.867885 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.887598 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.907503 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.927616 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.947687 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.968056 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 09 00:09:06 crc kubenswrapper[4810]: I1009 00:09:06.987145 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.007698 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.027342 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.041530 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" event={"ID":"7d4d7acd-916e-45d1-8d4d-990c386d806d","Type":"ContainerStarted","Data":"98376b151de37036c59809d16125fdf9cd5d2323d6515194ed168b95459c4136"} Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.041844 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" event={"ID":"7d4d7acd-916e-45d1-8d4d-990c386d806d","Type":"ContainerStarted","Data":"abf1442a05ad7da1c4b1792849256dd16b879c983b5fe5a6482a12539fb85de7"} Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.048168 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 09 00:09:07 crc 
kubenswrapper[4810]: I1009 00:09:07.068083 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.086281 4810 request.go:700] Waited for 1.017629073s due to client-side throttling, not priority and fairness, request: PATCH:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-cluster-samples-operator/pods/cluster-samples-operator-665b6dd947-4mpg7/status Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.108081 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.127593 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.148414 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.168094 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.188198 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.207524 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.226995 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.246719 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.268171 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.287622 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.306774 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.328981 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.346934 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.367753 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.388058 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.407673 4810 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.427576 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.446775 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.467995 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.487956 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.506809 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.528196 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.547483 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.568705 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.588077 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.608179 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.628100 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.648788 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.668520 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.701176 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.721002 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.727299 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763626 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-machine-approver-tls\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763698 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kndwc\" (UniqueName: \"kubernetes.io/projected/08622f6b-de6d-48bc-998d-435e09a52226-kube-api-access-kndwc\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763739 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlfnv\" (UniqueName: \"kubernetes.io/projected/2b7db5ab-0ff2-457c-8daf-53c06748f04e-kube-api-access-vlfnv\") pod \"downloads-7954f5f757-685td\" (UID: \"2b7db5ab-0ff2-457c-8daf-53c06748f04e\") " pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763776 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k59h6\" (UniqueName: \"kubernetes.io/projected/788a06a3-b34e-460e-a981-67130389de67-kube-api-access-k59h6\") pod \"image-pruner-29332800-wtmbd\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763811 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690a8646-257c-49be-b693-e151a1bab532-config\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763877 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/690a8646-257c-49be-b693-e151a1bab532-images\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763908 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-config\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763941 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-bound-sa-token\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.763976 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/9129ece3-62c9-4ce5-b1ce-43dd2f55b036-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4mpg7\" (UID: \"9129ece3-62c9-4ce5-b1ce-43dd2f55b036\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764009 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764042 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764078 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764110 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-config\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764235 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764329 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/788a06a3-b34e-460e-a981-67130389de67-serviceca\") pod \"image-pruner-29332800-wtmbd\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.764354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-policies\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: E1009 00:09:07.764768 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.264744949 +0000 UTC m=+145.790383700 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765012 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765071 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-tls\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765092 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a2bb4c7a-46ee-4294-ac9f-97a89488515d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765149 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-default-certificate\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765174 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/690a8646-257c-49be-b693-e151a1bab532-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765193 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-client-ca\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765212 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-auth-proxy-config\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765274 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-trusted-ca\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765299 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765319 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftm7c\" (UniqueName: \"kubernetes.io/projected/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-kube-api-access-ftm7c\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765360 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75eef79f-6ade-40d4-8bf9-768a5fe06edc-serving-cert\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765404 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-certificates\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.765479 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767210 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs4xm\" (UniqueName: \"kubernetes.io/projected/690a8646-257c-49be-b693-e151a1bab532-kube-api-access-gs4xm\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767374 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" 
(UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767406 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767504 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a2bb4c7a-46ee-4294-ac9f-97a89488515d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767563 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz42n\" (UniqueName: \"kubernetes.io/projected/a9b072a5-a708-4f29-9aae-f52e98802f1c-kube-api-access-pz42n\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767625 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-stats-auth\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767694 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767778 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/75eef79f-6ade-40d4-8bf9-768a5fe06edc-trusted-ca\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767834 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767885 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767910 4810 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djnj7\" (UniqueName: \"kubernetes.io/projected/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-kube-api-access-djnj7\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767936 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767961 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.767996 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9ffq\" (UniqueName: \"kubernetes.io/projected/9129ece3-62c9-4ce5-b1ce-43dd2f55b036-kube-api-access-x9ffq\") pod \"cluster-samples-operator-665b6dd947-4mpg7\" (UID: \"9129ece3-62c9-4ce5-b1ce-43dd2f55b036\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768018 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768038 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-serving-cert\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768059 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmfqs\" (UniqueName: \"kubernetes.io/projected/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-kube-api-access-jmfqs\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768118 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75eef79f-6ade-40d4-8bf9-768a5fe06edc-config\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " 
pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768179 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhvr4\" (UniqueName: \"kubernetes.io/projected/75eef79f-6ade-40d4-8bf9-768a5fe06edc-kube-api-access-fhvr4\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768231 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768277 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-dir\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768320 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08622f6b-de6d-48bc-998d-435e09a52226-service-ca-bundle\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-metrics-certs\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.768399 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjn9g\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-kube-api-access-cjn9g\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.791302 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.809209 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.828190 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.847934 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869471 
4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869606 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/788a06a3-b34e-460e-a981-67130389de67-serviceca\") pod \"image-pruner-29332800-wtmbd\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:07 crc kubenswrapper[4810]: E1009 00:09:07.869637 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.369613254 +0000 UTC m=+145.895251955 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869678 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-policies\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869754 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h65w4\" (UniqueName: \"kubernetes.io/projected/e238e120-5ce2-4690-87ca-a950ad8afa8c-kube-api-access-h65w4\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869778 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a2bb4c7a-46ee-4294-ac9f-97a89488515d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869794 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869812 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-registration-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869846 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e5bc3465-32cf-4253-8a62-d05b03903515-webhook-cert\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869877 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/681c7462-9aad-4dda-abb5-541f675d83a8-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869896 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/690a8646-257c-49be-b693-e151a1bab532-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869910 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/057eac7b-cf2e-4d17-9dfc-cfae85b2e26b-cert\") pod \"ingress-canary-zgvxk\" (UID: \"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b\") " pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869925 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/065c4541-9a8f-4e9b-95c8-621fc0c1ef9b-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-blbp4\" (UID: \"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869945 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-trusted-ca\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869961 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftm7c\" (UniqueName: \"kubernetes.io/projected/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-kube-api-access-ftm7c\") pod \"machine-approver-56656f9798-s8k6h\" 
(UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.869985 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75eef79f-6ade-40d4-8bf9-768a5fe06edc-serving-cert\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870000 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-console-config\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870018 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870033 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e5bc3465-32cf-4253-8a62-d05b03903515-apiservice-cert\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870053 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-certificates\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870075 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870104 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs4xm\" (UniqueName: \"kubernetes.io/projected/690a8646-257c-49be-b693-e151a1bab532-kube-api-access-gs4xm\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870135 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb13d08-4b13-48c5-a5a9-84407effd402-console-serving-cert\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " 
pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870161 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a2bb4c7a-46ee-4294-ac9f-97a89488515d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870184 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2eb13d08-4b13-48c5-a5a9-84407effd402-console-oauth-config\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870213 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz42n\" (UniqueName: \"kubernetes.io/projected/a9b072a5-a708-4f29-9aae-f52e98802f1c-kube-api-access-pz42n\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870230 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6bvf\" (UniqueName: \"kubernetes.io/projected/bb7dae35-5849-4f2f-88df-a5f815c4d2f5-kube-api-access-b6bvf\") pod \"package-server-manager-789f6589d5-qlkkp\" (UID: \"bb7dae35-5849-4f2f-88df-a5f815c4d2f5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870244 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-socket-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870258 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-oauth-serving-cert\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870274 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6xcp\" (UniqueName: \"kubernetes.io/projected/2eb13d08-4b13-48c5-a5a9-84407effd402-kube-api-access-n6xcp\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870318 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c8d4552a-7960-4504-bd01-aaa15082c8ab-srv-cert\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870336 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/75eef79f-6ade-40d4-8bf9-768a5fe06edc-trusted-ca\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870353 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870369 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1884cc58-91e6-45ba-b5f0-061c85b25798-signing-key\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870389 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/681c7462-9aad-4dda-abb5-541f675d83a8-config\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870395 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-policies\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870419 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870438 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djnj7\" (UniqueName: \"kubernetes.io/projected/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-kube-api-access-djnj7\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870474 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870498 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-serving-cert\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870514 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f942d602-c4fc-47a2-82d5-abc83eb4472b-config-volume\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870532 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfc66780-664e-4251-af70-c8b690092170-certs\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870556 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75eef79f-6ade-40d4-8bf9-768a5fe06edc-config\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870574 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcq65\" (UniqueName: \"kubernetes.io/projected/c82928f7-de15-44e5-a60d-2494d90cc554-kube-api-access-qcq65\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870589 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82928f7-de15-44e5-a60d-2494d90cc554-config-volume\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870616 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhvr4\" (UniqueName: \"kubernetes.io/projected/75eef79f-6ade-40d4-8bf9-768a5fe06edc-kube-api-access-fhvr4\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870632 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-service-ca\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870648 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/76be45bb-06fd-4e17-859a-8522d12fa162-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: 
\"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870664 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4pcl\" (UniqueName: \"kubernetes.io/projected/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-kube-api-access-b4pcl\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870690 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e5bc3465-32cf-4253-8a62-d05b03903515-tmpfs\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870710 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-dir\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870729 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jv5h\" (UniqueName: \"kubernetes.io/projected/82a04500-5006-4149-a4db-1982b49a1fcd-kube-api-access-7jv5h\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870745 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60ab3d72-24f7-490e-930e-1e60fdbfe74b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870765 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-metrics-certs\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870781 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-images\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870801 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjn9g\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-kube-api-access-cjn9g\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870831 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/400482aa-2212-4b4d-acba-af4c61fb24f7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870856 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-service-ca\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870872 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slv9b\" (UniqueName: \"kubernetes.io/projected/6779aaef-a11f-4301-91cb-bd26489f7408-kube-api-access-slv9b\") pod \"migrator-59844c95c7-ssmlk\" (UID: \"6779aaef-a11f-4301-91cb-bd26489f7408\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870910 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlfnv\" (UniqueName: \"kubernetes.io/projected/2b7db5ab-0ff2-457c-8daf-53c06748f04e-kube-api-access-vlfnv\") pod \"downloads-7954f5f757-685td\" (UID: \"2b7db5ab-0ff2-457c-8daf-53c06748f04e\") " pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870926 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcrkh\" (UniqueName: \"kubernetes.io/projected/400482aa-2212-4b4d-acba-af4c61fb24f7-kube-api-access-hcrkh\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870945 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k59h6\" (UniqueName: \"kubernetes.io/projected/788a06a3-b34e-460e-a981-67130389de67-kube-api-access-k59h6\") pod \"image-pruner-29332800-wtmbd\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870960 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690a8646-257c-49be-b693-e151a1bab532-config\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870976 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 
09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.870993 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-config\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871009 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvnxw\" (UniqueName: \"kubernetes.io/projected/e5bc3465-32cf-4253-8a62-d05b03903515-kube-api-access-rvnxw\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871024 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/400482aa-2212-4b4d-acba-af4c61fb24f7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871040 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6vxk\" (UniqueName: \"kubernetes.io/projected/1884cc58-91e6-45ba-b5f0-061c85b25798-kube-api-access-k6vxk\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871075 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871092 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-client\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871113 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb7dae35-5849-4f2f-88df-a5f815c4d2f5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qlkkp\" (UID: \"bb7dae35-5849-4f2f-88df-a5f815c4d2f5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871136 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qjfl\" (UniqueName: \"kubernetes.io/projected/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-kube-api-access-4qjfl\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc 
kubenswrapper[4810]: I1009 00:09:07.871157 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfgc4\" (UniqueName: \"kubernetes.io/projected/a8e183dc-1f17-4b0f-a177-e4a17569b307-kube-api-access-zfgc4\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871182 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-tls\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871210 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-client-ca\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871230 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-auth-proxy-config\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871252 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr5h6\" (UniqueName: \"kubernetes.io/projected/62454395-5349-44a7-8f9e-4acfe7a271cd-kube-api-access-jr5h6\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871280 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-default-certificate\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871301 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871319 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62454395-5349-44a7-8f9e-4acfe7a271cd-serving-cert\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871372 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-csi-data-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871398 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r26q\" (UniqueName: \"kubernetes.io/projected/8f995ff5-64f5-41c4-aa59-0521d9cd2328-kube-api-access-4r26q\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871416 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60ab3d72-24f7-490e-930e-1e60fdbfe74b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871450 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82928f7-de15-44e5-a60d-2494d90cc554-secret-volume\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871469 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62454395-5349-44a7-8f9e-4acfe7a271cd-config\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871487 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfc66780-664e-4251-af70-c8b690092170-node-bootstrap-token\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871505 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkhwd\" (UniqueName: \"kubernetes.io/projected/bfc66780-664e-4251-af70-c8b690092170-kube-api-access-tkhwd\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871536 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw2jz\" (UniqueName: \"kubernetes.io/projected/c8d4552a-7960-4504-bd01-aaa15082c8ab-kube-api-access-kw2jz\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871550 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serviceca\" (UniqueName: \"kubernetes.io/configmap/788a06a3-b34e-460e-a981-67130389de67-serviceca\") pod \"image-pruner-29332800-wtmbd\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871554 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60ab3d72-24f7-490e-930e-1e60fdbfe74b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871620 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f942d602-c4fc-47a2-82d5-abc83eb4472b-metrics-tls\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871645 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-config\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871664 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-stats-auth\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871687 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/76be45bb-06fd-4e17-859a-8522d12fa162-proxy-tls\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871716 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871735 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/681c7462-9aad-4dda-abb5-541f675d83a8-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871756 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-plugins-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871773 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871791 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871811 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9ffq\" (UniqueName: \"kubernetes.io/projected/9129ece3-62c9-4ce5-b1ce-43dd2f55b036-kube-api-access-x9ffq\") pod \"cluster-samples-operator-665b6dd947-4mpg7\" (UID: \"9129ece3-62c9-4ce5-b1ce-43dd2f55b036\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871848 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871865 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmfqs\" (UniqueName: \"kubernetes.io/projected/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-kube-api-access-jmfqs\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871882 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871921 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-ca\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871937 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/a8e183dc-1f17-4b0f-a177-e4a17569b307-profile-collector-cert\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871979 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.871979 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a2bb4c7a-46ee-4294-ac9f-97a89488515d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.872015 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f995ff5-64f5-41c4-aa59-0521d9cd2328-serving-cert\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.872901 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-mountpoint-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.872923 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8e183dc-1f17-4b0f-a177-e4a17569b307-srv-cert\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.872979 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvw82\" (UniqueName: \"kubernetes.io/projected/065c4541-9a8f-4e9b-95c8-621fc0c1ef9b-kube-api-access-jvw82\") pod \"multus-admission-controller-857f4d67dd-blbp4\" (UID: \"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873011 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08622f6b-de6d-48bc-998d-435e09a52226-service-ca-bundle\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873033 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1884cc58-91e6-45ba-b5f0-061c85b25798-signing-cabundle\") pod \"service-ca-9c57cc56f-w5fvk\" 
(UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873068 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzr6s\" (UniqueName: \"kubernetes.io/projected/76be45bb-06fd-4e17-859a-8522d12fa162-kube-api-access-bzr6s\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873093 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873115 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e238e120-5ce2-4690-87ca-a950ad8afa8c-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873140 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p666r\" (UniqueName: \"kubernetes.io/projected/057eac7b-cf2e-4d17-9dfc-cfae85b2e26b-kube-api-access-p666r\") pod \"ingress-canary-zgvxk\" (UID: \"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b\") " pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873157 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-proxy-tls\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873172 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873190 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-machine-approver-tls\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873206 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kndwc\" (UniqueName: \"kubernetes.io/projected/08622f6b-de6d-48bc-998d-435e09a52226-kube-api-access-kndwc\") pod 
\"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873236 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/690a8646-257c-49be-b693-e151a1bab532-images\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873259 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-config\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873277 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c8d4552a-7960-4504-bd01-aaa15082c8ab-profile-collector-cert\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873295 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67fm5\" (UniqueName: \"kubernetes.io/projected/f942d602-c4fc-47a2-82d5-abc83eb4472b-kube-api-access-67fm5\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873313 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-trusted-ca-bundle\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873331 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-bound-sa-token\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873348 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/9129ece3-62c9-4ce5-b1ce-43dd2f55b036-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4mpg7\" (UID: \"9129ece3-62c9-4ce5-b1ce-43dd2f55b036\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873366 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc 
kubenswrapper[4810]: I1009 00:09:07.873396 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-config\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873417 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e238e120-5ce2-4690-87ca-a950ad8afa8c-serving-cert\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.873655 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/690a8646-257c-49be-b693-e151a1bab532-config\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.874943 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.875996 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.876411 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.875810 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/75eef79f-6ade-40d4-8bf9-768a5fe06edc-trusted-ca\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.877546 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-stats-auth\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.878809 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-certificates\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.880454 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-client-ca\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.880656 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.880743 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08622f6b-de6d-48bc-998d-435e09a52226-service-ca-bundle\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.882262 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/690a8646-257c-49be-b693-e151a1bab532-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.882349 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-tls\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.883082 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-trusted-ca\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.883230 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.883385 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.883719 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-dir\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.884017 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75eef79f-6ade-40d4-8bf9-768a5fe06edc-config\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.884690 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-serving-cert\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.885491 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.885984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.886601 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.886661 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-auth-proxy-config\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.886984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-config\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.887097 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/75eef79f-6ade-40d4-8bf9-768a5fe06edc-serving-cert\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.887324 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.887637 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/690a8646-257c-49be-b693-e151a1bab532-images\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.888531 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-default-certificate\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.889408 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-config\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.889444 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/9129ece3-62c9-4ce5-b1ce-43dd2f55b036-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4mpg7\" (UID: \"9129ece3-62c9-4ce5-b1ce-43dd2f55b036\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.890064 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.890187 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a2bb4c7a-46ee-4294-ac9f-97a89488515d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.890722 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: 
\"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.890805 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/08622f6b-de6d-48bc-998d-435e09a52226-metrics-certs\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.891028 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.891176 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-machine-approver-tls\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.906239 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdv9k\" (UniqueName: \"kubernetes.io/projected/e1bff448-76f4-4bda-a346-9ba668d4d05c-kube-api-access-sdv9k\") pod \"authentication-operator-69f744f599-trv84\" (UID: \"e1bff448-76f4-4bda-a346-9ba668d4d05c\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.909587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rm7g\" (UniqueName: \"kubernetes.io/projected/1e9a257e-bb41-452c-9b3f-cbef1c754d1f-kube-api-access-9rm7g\") pod \"kube-storage-version-migrator-operator-b67b599dd-6zdk4\" (UID: \"1e9a257e-bb41-452c-9b3f-cbef1c754d1f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.925707 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.931997 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf7w2\" (UniqueName: \"kubernetes.io/projected/456f2042-395a-424b-80c1-3bc40286210d-kube-api-access-mf7w2\") pod \"control-plane-machine-set-operator-78cbb6b69f-jp7tg\" (UID: \"456f2042-395a-424b-80c1-3bc40286210d\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.934124 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.943944 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/06fdb80d-d467-44d7-9859-efcc2fff59e6-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.965841 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dvkw\" (UniqueName: \"kubernetes.io/projected/9592f7ec-a684-4b31-97b3-32c3439a8ee0-kube-api-access-2dvkw\") pod \"controller-manager-879f6c89f-mzlgx\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974115 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974163 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-ca\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974185 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8e183dc-1f17-4b0f-a177-e4a17569b307-profile-collector-cert\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974215 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f995ff5-64f5-41c4-aa59-0521d9cd2328-serving-cert\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974234 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-mountpoint-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974261 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8e183dc-1f17-4b0f-a177-e4a17569b307-srv-cert\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974280 4810 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-jvw82\" (UniqueName: \"kubernetes.io/projected/065c4541-9a8f-4e9b-95c8-621fc0c1ef9b-kube-api-access-jvw82\") pod \"multus-admission-controller-857f4d67dd-blbp4\" (UID: \"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974309 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1884cc58-91e6-45ba-b5f0-061c85b25798-signing-cabundle\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974329 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzr6s\" (UniqueName: \"kubernetes.io/projected/76be45bb-06fd-4e17-859a-8522d12fa162-kube-api-access-bzr6s\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974348 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974370 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e238e120-5ce2-4690-87ca-a950ad8afa8c-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974392 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p666r\" (UniqueName: \"kubernetes.io/projected/057eac7b-cf2e-4d17-9dfc-cfae85b2e26b-kube-api-access-p666r\") pod \"ingress-canary-zgvxk\" (UID: \"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b\") " pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974416 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-proxy-tls\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974440 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974469 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/c8d4552a-7960-4504-bd01-aaa15082c8ab-profile-collector-cert\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974490 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67fm5\" (UniqueName: \"kubernetes.io/projected/f942d602-c4fc-47a2-82d5-abc83eb4472b-kube-api-access-67fm5\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974504 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-trusted-ca-bundle\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974520 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e238e120-5ce2-4690-87ca-a950ad8afa8c-serving-cert\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974549 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974565 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h65w4\" (UniqueName: \"kubernetes.io/projected/e238e120-5ce2-4690-87ca-a950ad8afa8c-kube-api-access-h65w4\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974580 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974596 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/681c7462-9aad-4dda-abb5-541f675d83a8-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974609 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-registration-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: 
\"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974623 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e5bc3465-32cf-4253-8a62-d05b03903515-webhook-cert\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974639 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/057eac7b-cf2e-4d17-9dfc-cfae85b2e26b-cert\") pod \"ingress-canary-zgvxk\" (UID: \"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b\") " pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974659 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/065c4541-9a8f-4e9b-95c8-621fc0c1ef9b-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-blbp4\" (UID: \"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974687 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-console-config\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974728 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e5bc3465-32cf-4253-8a62-d05b03903515-apiservice-cert\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974762 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb13d08-4b13-48c5-a5a9-84407effd402-console-serving-cert\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974791 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6bvf\" (UniqueName: \"kubernetes.io/projected/bb7dae35-5849-4f2f-88df-a5f815c4d2f5-kube-api-access-b6bvf\") pod \"package-server-manager-789f6589d5-qlkkp\" (UID: \"bb7dae35-5849-4f2f-88df-a5f815c4d2f5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974809 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2eb13d08-4b13-48c5-a5a9-84407effd402-console-oauth-config\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974851 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-socket-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974871 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-oauth-serving-cert\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974892 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6xcp\" (UniqueName: \"kubernetes.io/projected/2eb13d08-4b13-48c5-a5a9-84407effd402-kube-api-access-n6xcp\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974913 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1884cc58-91e6-45ba-b5f0-061c85b25798-signing-key\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974933 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c8d4552a-7960-4504-bd01-aaa15082c8ab-srv-cert\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.974955 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/681c7462-9aad-4dda-abb5-541f675d83a8-config\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975003 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f942d602-c4fc-47a2-82d5-abc83eb4472b-config-volume\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975025 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcq65\" (UniqueName: \"kubernetes.io/projected/c82928f7-de15-44e5-a60d-2494d90cc554-kube-api-access-qcq65\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975049 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfc66780-664e-4251-af70-c8b690092170-certs\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975071 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82928f7-de15-44e5-a60d-2494d90cc554-config-volume\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975099 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-service-ca\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975121 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/76be45bb-06fd-4e17-859a-8522d12fa162-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975142 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4pcl\" (UniqueName: \"kubernetes.io/projected/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-kube-api-access-b4pcl\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975175 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e5bc3465-32cf-4253-8a62-d05b03903515-tmpfs\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975198 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jv5h\" (UniqueName: \"kubernetes.io/projected/82a04500-5006-4149-a4db-1982b49a1fcd-kube-api-access-7jv5h\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975219 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60ab3d72-24f7-490e-930e-1e60fdbfe74b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975240 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-images\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975269 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/400482aa-2212-4b4d-acba-af4c61fb24f7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975289 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-service-ca\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975310 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slv9b\" (UniqueName: \"kubernetes.io/projected/6779aaef-a11f-4301-91cb-bd26489f7408-kube-api-access-slv9b\") pod \"migrator-59844c95c7-ssmlk\" (UID: \"6779aaef-a11f-4301-91cb-bd26489f7408\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975349 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcrkh\" (UniqueName: \"kubernetes.io/projected/400482aa-2212-4b4d-acba-af4c61fb24f7-kube-api-access-hcrkh\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975369 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-config\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975389 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvnxw\" (UniqueName: \"kubernetes.io/projected/e5bc3465-32cf-4253-8a62-d05b03903515-kube-api-access-rvnxw\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975412 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/400482aa-2212-4b4d-acba-af4c61fb24f7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975435 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6vxk\" (UniqueName: \"kubernetes.io/projected/1884cc58-91e6-45ba-b5f0-061c85b25798-kube-api-access-k6vxk\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975457 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-client\") pod \"etcd-operator-b45778765-z2zxv\" (UID: 
\"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975479 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb7dae35-5849-4f2f-88df-a5f815c4d2f5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qlkkp\" (UID: \"bb7dae35-5849-4f2f-88df-a5f815c4d2f5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975505 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qjfl\" (UniqueName: \"kubernetes.io/projected/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-kube-api-access-4qjfl\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975527 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfgc4\" (UniqueName: \"kubernetes.io/projected/a8e183dc-1f17-4b0f-a177-e4a17569b307-kube-api-access-zfgc4\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975551 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr5h6\" (UniqueName: \"kubernetes.io/projected/62454395-5349-44a7-8f9e-4acfe7a271cd-kube-api-access-jr5h6\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975580 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62454395-5349-44a7-8f9e-4acfe7a271cd-serving-cert\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975601 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r26q\" (UniqueName: \"kubernetes.io/projected/8f995ff5-64f5-41c4-aa59-0521d9cd2328-kube-api-access-4r26q\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975622 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-csi-data-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975643 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82928f7-de15-44e5-a60d-2494d90cc554-secret-volume\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 
00:09:07.975661 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62454395-5349-44a7-8f9e-4acfe7a271cd-config\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975682 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60ab3d72-24f7-490e-930e-1e60fdbfe74b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975701 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfc66780-664e-4251-af70-c8b690092170-node-bootstrap-token\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975733 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw2jz\" (UniqueName: \"kubernetes.io/projected/c8d4552a-7960-4504-bd01-aaa15082c8ab-kube-api-access-kw2jz\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975753 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60ab3d72-24f7-490e-930e-1e60fdbfe74b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975775 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkhwd\" (UniqueName: \"kubernetes.io/projected/bfc66780-664e-4251-af70-c8b690092170-kube-api-access-tkhwd\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975797 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f942d602-c4fc-47a2-82d5-abc83eb4472b-metrics-tls\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975836 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/76be45bb-06fd-4e17-859a-8522d12fa162-proxy-tls\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975861 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-config\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975885 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/681c7462-9aad-4dda-abb5-541f675d83a8-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975909 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-plugins-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.975931 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.976732 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-ca\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.977346 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.978328 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-service-ca\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.979988 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f995ff5-64f5-41c4-aa59-0521d9cd2328-serving-cert\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.980257 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/bfc66780-664e-4251-af70-c8b690092170-certs\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc 
kubenswrapper[4810]: I1009 00:09:07.980911 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/76be45bb-06fd-4e17-859a-8522d12fa162-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.980461 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-proxy-tls\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.980508 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-mountpoint-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.980750 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8e183dc-1f17-4b0f-a177-e4a17569b307-profile-collector-cert\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.980463 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/60ab3d72-24f7-490e-930e-1e60fdbfe74b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.981991 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-images\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.982344 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82928f7-de15-44e5-a60d-2494d90cc554-config-volume\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.982562 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/1884cc58-91e6-45ba-b5f0-061c85b25798-signing-cabundle\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.982662 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/681c7462-9aad-4dda-abb5-541f675d83a8-serving-cert\") pod 
\"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.983125 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62454395-5349-44a7-8f9e-4acfe7a271cd-config\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.983328 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-console-config\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.983446 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e5bc3465-32cf-4253-8a62-d05b03903515-tmpfs\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.983564 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-registration-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: E1009 00:09:07.983855 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.483809426 +0000 UTC m=+146.009448137 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.984106 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/60ab3d72-24f7-490e-930e-1e60fdbfe74b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.985325 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fqzl\" (UniqueName: \"kubernetes.io/projected/06f5797c-1640-413d-882d-957c10daea4c-kube-api-access-5fqzl\") pod \"dns-operator-744455d44c-thprd\" (UID: \"06f5797c-1640-413d-882d-957c10daea4c\") " pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.986220 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e238e120-5ce2-4690-87ca-a950ad8afa8c-serving-cert\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.986541 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82928f7-de15-44e5-a60d-2494d90cc554-secret-volume\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.986594 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/065c4541-9a8f-4e9b-95c8-621fc0c1ef9b-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-blbp4\" (UID: \"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.986633 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c8d4552a-7960-4504-bd01-aaa15082c8ab-profile-collector-cert\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.986871 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-service-ca\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.987320 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8f995ff5-64f5-41c4-aa59-0521d9cd2328-config\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.987406 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.987710 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-oauth-serving-cert\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.987973 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/400482aa-2212-4b4d-acba-af4c61fb24f7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.988322 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/681c7462-9aad-4dda-abb5-541f675d83a8-config\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.988990 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/bfc66780-664e-4251-af70-c8b690092170-node-bootstrap-token\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.989116 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-config\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.989157 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-plugins-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.989298 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.990032 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e5bc3465-32cf-4253-8a62-d05b03903515-webhook-cert\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.990387 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.990488 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.990539 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-csi-data-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.991397 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2eb13d08-4b13-48c5-a5a9-84407effd402-trusted-ca-bundle\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992048 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62454395-5349-44a7-8f9e-4acfe7a271cd-serving-cert\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992075 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c8d4552a-7960-4504-bd01-aaa15082c8ab-srv-cert\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992460 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e5bc3465-32cf-4253-8a62-d05b03903515-apiservice-cert\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992488 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/76be45bb-06fd-4e17-859a-8522d12fa162-proxy-tls\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 
09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992492 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8f995ff5-64f5-41c4-aa59-0521d9cd2328-etcd-client\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992650 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e238e120-5ce2-4690-87ca-a950ad8afa8c-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992712 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/400482aa-2212-4b4d-acba-af4c61fb24f7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.992732 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-socket-dir\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.993661 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8e183dc-1f17-4b0f-a177-e4a17569b307-srv-cert\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.994025 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2eb13d08-4b13-48c5-a5a9-84407effd402-console-serving-cert\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.995042 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/1884cc58-91e6-45ba-b5f0-061c85b25798-signing-key\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.996348 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2eb13d08-4b13-48c5-a5a9-84407effd402-console-oauth-config\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.996896 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:07 crc kubenswrapper[4810]: I1009 00:09:07.999756 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bb7dae35-5849-4f2f-88df-a5f815c4d2f5-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qlkkp\" (UID: \"bb7dae35-5849-4f2f-88df-a5f815c4d2f5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.005618 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26mdw\" (UniqueName: \"kubernetes.io/projected/06fdb80d-d467-44d7-9859-efcc2fff59e6-kube-api-access-26mdw\") pod \"ingress-operator-5b745b69d9-gf69h\" (UID: \"06fdb80d-d467-44d7-9859-efcc2fff59e6\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.029427 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vv4n\" (UniqueName: \"kubernetes.io/projected/7c410054-569a-4d33-8e55-87f28ba661f6-kube-api-access-6vv4n\") pod \"apiserver-7bbb656c7d-55r74\" (UID: \"7c410054-569a-4d33-8e55-87f28ba661f6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.047350 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6nbw\" (UniqueName: \"kubernetes.io/projected/a9f2d63d-ad97-4ee1-8acd-68dd9fc22994-kube-api-access-t6nbw\") pod \"openshift-apiserver-operator-796bbdcf4f-g9tnq\" (UID: \"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.048350 4810 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.049589 4810 generic.go:334] "Generic (PLEG): container finished" podID="7d4d7acd-916e-45d1-8d4d-990c386d806d" containerID="98376b151de37036c59809d16125fdf9cd5d2323d6515194ed168b95459c4136" exitCode=0 Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.049624 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" event={"ID":"7d4d7acd-916e-45d1-8d4d-990c386d806d","Type":"ContainerDied","Data":"98376b151de37036c59809d16125fdf9cd5d2323d6515194ed168b95459c4136"} Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.049654 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" event={"ID":"7d4d7acd-916e-45d1-8d4d-990c386d806d","Type":"ContainerStarted","Data":"059e90378099aaa2ce3b3506c00be7804162b8c21161408be4d752bf16f4c0b8"} Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.049667 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" event={"ID":"7d4d7acd-916e-45d1-8d4d-990c386d806d","Type":"ContainerStarted","Data":"21bc5f3ac75e674d4f82f06273753f5142784b954f4315e916abe0e3e34b54bb"} Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.073795 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.077045 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.077213 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.577188971 +0000 UTC m=+146.102827672 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.077938 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.078465 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.578458178 +0000 UTC m=+146.104096879 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.088005 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.105925 4810 request.go:700] Waited for 1.94928074s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-dockercfg-jwfmh&limit=500&resourceVersion=0 Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.107317 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.133982 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.140419 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f942d602-c4fc-47a2-82d5-abc83eb4472b-metrics-tls\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.145601 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-trv84"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.147751 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.152755 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f942d602-c4fc-47a2-82d5-abc83eb4472b-config-volume\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.172478 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.175732 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.179666 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.180459 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-09 00:09:08.680427438 +0000 UTC m=+146.206066179 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.186008 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/057eac7b-cf2e-4d17-9dfc-cfae85b2e26b-cert\") pod \"ingress-canary-zgvxk\" (UID: \"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b\") " pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.188572 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.193718 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4"] Oct 09 00:09:08 crc kubenswrapper[4810]: W1009 00:09:08.202128 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1e9a257e_bb41_452c_9b3f_cbef1c754d1f.slice/crio-e1850e3eba316141b81047916598d43144a96fa18f85886d3b64f06d99e80fa8 WatchSource:0}: Error finding container e1850e3eba316141b81047916598d43144a96fa18f85886d3b64f06d99e80fa8: Status 404 returned error can't find the container with id e1850e3eba316141b81047916598d43144a96fa18f85886d3b64f06d99e80fa8 Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.208693 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.212378 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-thprd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.227110 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.267444 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.283368 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.285162 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg"] Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.287148 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-09 00:09:08.783943733 +0000 UTC m=+146.309582434 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.291887 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjn9g\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-kube-api-access-cjn9g\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.305270 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.307324 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.324953 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs4xm\" (UniqueName: \"kubernetes.io/projected/690a8646-257c-49be-b693-e151a1bab532-kube-api-access-gs4xm\") pod \"machine-api-operator-5694c8668f-5jswd\" (UID: \"690a8646-257c-49be-b693-e151a1bab532\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.349754 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9ffq\" (UniqueName: \"kubernetes.io/projected/9129ece3-62c9-4ce5-b1ce-43dd2f55b036-kube-api-access-x9ffq\") pod \"cluster-samples-operator-665b6dd947-4mpg7\" (UID: \"9129ece3-62c9-4ce5-b1ce-43dd2f55b036\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.364867 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz42n\" (UniqueName: \"kubernetes.io/projected/a9b072a5-a708-4f29-9aae-f52e98802f1c-kube-api-access-pz42n\") pod \"oauth-openshift-558db77b4-lsgnf\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.384306 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.384957 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.884938634 +0000 UTC m=+146.410577335 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.385119 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlfnv\" (UniqueName: \"kubernetes.io/projected/2b7db5ab-0ff2-457c-8daf-53c06748f04e-kube-api-access-vlfnv\") pod \"downloads-7954f5f757-685td\" (UID: \"2b7db5ab-0ff2-457c-8daf-53c06748f04e\") " pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.405796 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k59h6\" (UniqueName: \"kubernetes.io/projected/788a06a3-b34e-460e-a981-67130389de67-kube-api-access-k59h6\") pod \"image-pruner-29332800-wtmbd\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.411420 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.421350 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djnj7\" (UniqueName: \"kubernetes.io/projected/d4af9ab8-8625-4c14-a0af-a566bb0ad0ed-kube-api-access-djnj7\") pod \"cluster-image-registry-operator-dc59b4c8b-2wxs8\" (UID: \"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.441531 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftm7c\" (UniqueName: \"kubernetes.io/projected/d7e6e1a2-50a6-4b46-a064-206b6ae1fa39-kube-api-access-ftm7c\") pod \"machine-approver-56656f9798-s8k6h\" (UID: \"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.455019 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.468492 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mzlgx"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.479433 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.483907 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kndwc\" (UniqueName: \"kubernetes.io/projected/08622f6b-de6d-48bc-998d-435e09a52226-kube-api-access-kndwc\") pod \"router-default-5444994796-78fsr\" (UID: \"08622f6b-de6d-48bc-998d-435e09a52226\") " pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.485741 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.485929 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-thprd"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.488183 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.494808 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhvr4\" (UniqueName: \"kubernetes.io/projected/75eef79f-6ade-40d4-8bf9-768a5fe06edc-kube-api-access-fhvr4\") pod \"console-operator-58897d9998-tmpb8\" (UID: \"75eef79f-6ade-40d4-8bf9-768a5fe06edc\") " pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.495131 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:08.995116746 +0000 UTC m=+146.520755447 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.503077 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.504061 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmfqs\" (UniqueName: \"kubernetes.io/projected/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-kube-api-access-jmfqs\") pod \"route-controller-manager-6576b87f9c-rvkhn\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.521487 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-bound-sa-token\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.533083 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.542217 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-mfjlm\" (UID: \"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.559555 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74"] Oct 09 00:09:08 crc kubenswrapper[4810]: W1009 00:09:08.564928 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06f5797c_1640_413d_882d_957c10daea4c.slice/crio-c8b66f9ff6b77c9e393bde319605d0febb9a11a4c14d687040951e3046af994f WatchSource:0}: Error finding container c8b66f9ff6b77c9e393bde319605d0febb9a11a4c14d687040951e3046af994f: Status 404 returned error can't find the container with id c8b66f9ff6b77c9e393bde319605d0febb9a11a4c14d687040951e3046af994f Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.574978 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.582338 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jv5h\" (UniqueName: \"kubernetes.io/projected/82a04500-5006-4149-a4db-1982b49a1fcd-kube-api-access-7jv5h\") pod \"marketplace-operator-79b997595-6hchn\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.589149 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkhwd\" (UniqueName: \"kubernetes.io/projected/bfc66780-664e-4251-af70-c8b690092170-kube-api-access-tkhwd\") pod \"machine-config-server-kvfcw\" (UID: \"bfc66780-664e-4251-af70-c8b690092170\") " pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.589219 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.589496 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.589608 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.089586094 +0000 UTC m=+146.615224795 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.589991 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.590607 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.090590723 +0000 UTC m=+146.616229424 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: W1009 00:09:08.608418 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c410054_569a_4d33_8e55_87f28ba661f6.slice/crio-40987c3184aa265f47ca17ca6fd499554246a9558894603c717f8c0b788e3a89 WatchSource:0}: Error finding container 40987c3184aa265f47ca17ca6fd499554246a9558894603c717f8c0b788e3a89: Status 404 returned error can't find the container with id 40987c3184aa265f47ca17ca6fd499554246a9558894603c717f8c0b788e3a89 Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.614065 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzr6s\" (UniqueName: \"kubernetes.io/projected/76be45bb-06fd-4e17-859a-8522d12fa162-kube-api-access-bzr6s\") pod \"machine-config-controller-84d6567774-rjmwx\" (UID: \"76be45bb-06fd-4e17-859a-8522d12fa162\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.614262 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.634181 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvw82\" (UniqueName: \"kubernetes.io/projected/065c4541-9a8f-4e9b-95c8-621fc0c1ef9b-kube-api-access-jvw82\") pod \"multus-admission-controller-857f4d67dd-blbp4\" (UID: \"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.644171 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.644905 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.648178 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qjfl\" (UniqueName: \"kubernetes.io/projected/41b0a7b8-e506-4c0b-a36c-6103481cc6e1-kube-api-access-4qjfl\") pod \"csi-hostpathplugin-g8p2l\" (UID: \"41b0a7b8-e506-4c0b-a36c-6103481cc6e1\") " pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.661966 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.662467 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.666644 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw2jz\" (UniqueName: \"kubernetes.io/projected/c8d4552a-7960-4504-bd01-aaa15082c8ab-kube-api-access-kw2jz\") pod \"catalog-operator-68c6474976-l8p8f\" (UID: \"c8d4552a-7960-4504-bd01-aaa15082c8ab\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.692784 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.693007 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.192976665 +0000 UTC m=+146.718615366 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.693224 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.693686 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.193670636 +0000 UTC m=+146.719309337 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.701986 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slv9b\" (UniqueName: \"kubernetes.io/projected/6779aaef-a11f-4301-91cb-bd26489f7408-kube-api-access-slv9b\") pod \"migrator-59844c95c7-ssmlk\" (UID: \"6779aaef-a11f-4301-91cb-bd26489f7408\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.723445 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.733182 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-685td"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.733709 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.744475 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60ab3d72-24f7-490e-930e-1e60fdbfe74b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dpdf2\" (UID: \"60ab3d72-24f7-490e-930e-1e60fdbfe74b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.744519 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcrkh\" (UniqueName: \"kubernetes.io/projected/400482aa-2212-4b4d-acba-af4c61fb24f7-kube-api-access-hcrkh\") pod \"openshift-controller-manager-operator-756b6f6bc6-qzjjp\" (UID: \"400482aa-2212-4b4d-acba-af4c61fb24f7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.747078 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.747174 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67fm5\" (UniqueName: \"kubernetes.io/projected/f942d602-c4fc-47a2-82d5-abc83eb4472b-kube-api-access-67fm5\") pod \"dns-default-q7x6z\" (UID: \"f942d602-c4fc-47a2-82d5-abc83eb4472b\") " pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.765266 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6vxk\" (UniqueName: \"kubernetes.io/projected/1884cc58-91e6-45ba-b5f0-061c85b25798-kube-api-access-k6vxk\") pod \"service-ca-9c57cc56f-w5fvk\" (UID: \"1884cc58-91e6-45ba-b5f0-061c85b25798\") " pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.766125 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.785413 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfgc4\" (UniqueName: \"kubernetes.io/projected/a8e183dc-1f17-4b0f-a177-e4a17569b307-kube-api-access-zfgc4\") pod \"olm-operator-6b444d44fb-tsnwg\" (UID: \"a8e183dc-1f17-4b0f-a177-e4a17569b307\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.787423 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.794975 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.795510 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.295487771 +0000 UTC m=+146.821126472 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.809891 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-kvfcw" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.821656 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr5h6\" (UniqueName: \"kubernetes.io/projected/62454395-5349-44a7-8f9e-4acfe7a271cd-kube-api-access-jr5h6\") pod \"service-ca-operator-777779d784-tjgjx\" (UID: \"62454395-5349-44a7-8f9e-4acfe7a271cd\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.822657 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p666r\" (UniqueName: \"kubernetes.io/projected/057eac7b-cf2e-4d17-9dfc-cfae85b2e26b-kube-api-access-p666r\") pod \"ingress-canary-zgvxk\" (UID: \"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b\") " pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.838032 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.844017 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.849661 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcq65\" (UniqueName: \"kubernetes.io/projected/c82928f7-de15-44e5-a60d-2494d90cc554-kube-api-access-qcq65\") pod \"collect-profiles-29332800-6m95d\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.852250 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-zgvxk" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.862673 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6xcp\" (UniqueName: \"kubernetes.io/projected/2eb13d08-4b13-48c5-a5a9-84407effd402-kube-api-access-n6xcp\") pod \"console-f9d7485db-dhpq4\" (UID: \"2eb13d08-4b13-48c5-a5a9-84407effd402\") " pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.882971 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/681c7462-9aad-4dda-abb5-541f675d83a8-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-pd9xd\" (UID: \"681c7462-9aad-4dda-abb5-541f675d83a8\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.896610 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.897539 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.397526812 +0000 UTC m=+146.923165513 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.900042 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.911965 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lsgnf"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.922941 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.930731 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6bvf\" (UniqueName: \"kubernetes.io/projected/bb7dae35-5849-4f2f-88df-a5f815c4d2f5-kube-api-access-b6bvf\") pod \"package-server-manager-789f6589d5-qlkkp\" (UID: \"bb7dae35-5849-4f2f-88df-a5f815c4d2f5\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.931393 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.939876 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.945264 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvnxw\" (UniqueName: \"kubernetes.io/projected/e5bc3465-32cf-4253-8a62-d05b03903515-kube-api-access-rvnxw\") pod \"packageserver-d55dfcdfc-289v7\" (UID: \"e5bc3465-32cf-4253-8a62-d05b03903515\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.948849 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r26q\" (UniqueName: \"kubernetes.io/projected/8f995ff5-64f5-41c4-aa59-0521d9cd2328-kube-api-access-4r26q\") pod \"etcd-operator-b45778765-z2zxv\" (UID: \"8f995ff5-64f5-41c4-aa59-0521d9cd2328\") " pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.967860 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h65w4\" (UniqueName: \"kubernetes.io/projected/e238e120-5ce2-4690-87ca-a950ad8afa8c-kube-api-access-h65w4\") pod \"openshift-config-operator-7777fb866f-gwvst\" (UID: \"e238e120-5ce2-4690-87ca-a950ad8afa8c\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:08 crc kubenswrapper[4810]: W1009 00:09:08.971987 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfc66780_664e_4251_af70_c8b690092170.slice/crio-0f20265f1489766a6db64a80a20200fd4f35206662d710df387ec032f5bc365d WatchSource:0}: Error finding container 0f20265f1489766a6db64a80a20200fd4f35206662d710df387ec032f5bc365d: Status 404 returned error can't find the container with id 0f20265f1489766a6db64a80a20200fd4f35206662d710df387ec032f5bc365d Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.986490 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4pcl\" (UniqueName: \"kubernetes.io/projected/4d279934-ea93-46eb-9f1a-cfc03bb52ac1-kube-api-access-b4pcl\") pod \"machine-config-operator-74547568cd-7c4pt\" (UID: \"4d279934-ea93-46eb-9f1a-cfc03bb52ac1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.998222 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7"] Oct 09 00:09:08 crc kubenswrapper[4810]: I1009 00:09:08.998528 4810 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:08 crc kubenswrapper[4810]: E1009 00:09:08.999638 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.499493172 +0000 UTC m=+147.025131883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.007280 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.026009 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.041089 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.056032 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.059111 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.064280 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" event={"ID":"e1bff448-76f4-4bda-a346-9ba668d4d05c","Type":"ContainerStarted","Data":"aa03ea4cd2008449861598c5b0635aa32cb8b9547c2f8113891de4dbefaf4319"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.064310 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" event={"ID":"e1bff448-76f4-4bda-a346-9ba668d4d05c","Type":"ContainerStarted","Data":"a926c101c79c227028eba67d4d12e5127aefdea515476dd040d8c1172596bda8"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.065965 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" event={"ID":"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994","Type":"ContainerStarted","Data":"12bf34e0c984b52944fdeb604dbb286546c1e129d849ee43842bab3e76fa202e"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.066000 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" event={"ID":"a9f2d63d-ad97-4ee1-8acd-68dd9fc22994","Type":"ContainerStarted","Data":"352c56ce24ba88ab21998e94339ae8379973dd698c53e1166c8fe40c863b4bc8"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.066664 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" event={"ID":"1e9a257e-bb41-452c-9b3f-cbef1c754d1f","Type":"ContainerStarted","Data":"1f05b53c713b1a84152cdf66c2b2385866690a6c7000b8db928f4d0066cde927"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.066688 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" event={"ID":"1e9a257e-bb41-452c-9b3f-cbef1c754d1f","Type":"ContainerStarted","Data":"e1850e3eba316141b81047916598d43144a96fa18f85886d3b64f06d99e80fa8"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.076874 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.078577 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.078869 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" event={"ID":"456f2042-395a-424b-80c1-3bc40286210d","Type":"ContainerStarted","Data":"8285addca67cf339785dc02fc5eb1f10e631e227d0a8146964ca2a068a01c14f"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.078898 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" event={"ID":"456f2042-395a-424b-80c1-3bc40286210d","Type":"ContainerStarted","Data":"68f0703828a75e719cab97d34f12f3a0fe32fcc24c52b4127749eb61c09a249d"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.087375 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" event={"ID":"06fdb80d-d467-44d7-9859-efcc2fff59e6","Type":"ContainerStarted","Data":"b447acf581c5f4aa1ffb761c4b5940c769b05848cdb22d502d2755d92895352e"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.087413 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" event={"ID":"06fdb80d-d467-44d7-9859-efcc2fff59e6","Type":"ContainerStarted","Data":"c59b0845608ad41b84b899da6bf035b985092ac15cc966d53902595c5f2a56b2"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.100505 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-5jswd"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.101002 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.101519 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.102618 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.602607635 +0000 UTC m=+147.128246336 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.106606 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.160539 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" event={"ID":"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39","Type":"ContainerStarted","Data":"45314b798f8d7fce4964710ce717ebce4f3715f5925af287b5d40f134c38f973"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.188709 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-685td" event={"ID":"2b7db5ab-0ff2-457c-8daf-53c06748f04e","Type":"ContainerStarted","Data":"9e8e62762c0abd749d1e7caa983e7b0692878c6e908d055ad9358a9bdd8e5f58"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.201597 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-78fsr" event={"ID":"08622f6b-de6d-48bc-998d-435e09a52226","Type":"ContainerStarted","Data":"94560f6843f43d4e6bf03e0513848bd4ca2ce2ea4a12ddf81fd0a4c749901fbf"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.201642 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-78fsr" event={"ID":"08622f6b-de6d-48bc-998d-435e09a52226","Type":"ContainerStarted","Data":"0cc804d40c5d856258427f16fc71b56ef6c76a01daeae70a96bb51ed88e4d499"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.210613 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.213720 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.713703335 +0000 UTC m=+147.239342036 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.231653 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-thprd" event={"ID":"06f5797c-1640-413d-882d-957c10daea4c","Type":"ContainerStarted","Data":"ac30a377daf569f8152c4214f04ec689f7528fe3f017c1052ddc943a38e32865"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.235350 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-thprd" event={"ID":"06f5797c-1640-413d-882d-957c10daea4c","Type":"ContainerStarted","Data":"c8b66f9ff6b77c9e393bde319605d0febb9a11a4c14d687040951e3046af994f"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.240771 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" event={"ID":"9592f7ec-a684-4b31-97b3-32c3439a8ee0","Type":"ContainerStarted","Data":"3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.240829 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" event={"ID":"9592f7ec-a684-4b31-97b3-32c3439a8ee0","Type":"ContainerStarted","Data":"2c2ed0b98219b2d2c70ec71a262f9f133cbe2b2a8697572992dcec5e656f9fb5"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.241311 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.244037 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-kvfcw" event={"ID":"bfc66780-664e-4251-af70-c8b690092170","Type":"ContainerStarted","Data":"0f20265f1489766a6db64a80a20200fd4f35206662d710df387ec032f5bc365d"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.245357 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" event={"ID":"7c410054-569a-4d33-8e55-87f28ba661f6","Type":"ContainerStarted","Data":"40987c3184aa265f47ca17ca6fd499554246a9558894603c717f8c0b788e3a89"} Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.252451 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.257899 4810 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mzlgx container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.257932 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" podUID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Oct 09 00:09:09 crc kubenswrapper[4810]: W1009 00:09:09.282234 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4af9ab8_8625_4c14_a0af_a566bb0ad0ed.slice/crio-d86fa808729c57ba5113696ff4efddeb79cd2a61bec1e8ec0e68a9617400df86 WatchSource:0}: Error finding container d86fa808729c57ba5113696ff4efddeb79cd2a61bec1e8ec0e68a9617400df86: Status 404 returned error can't find the container with id d86fa808729c57ba5113696ff4efddeb79cd2a61bec1e8ec0e68a9617400df86 Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.318854 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.321595 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-78fsr" podStartSLOduration=127.321578369 podStartE2EDuration="2m7.321578369s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:09.320630051 +0000 UTC m=+146.846268752" watchObservedRunningTime="2025-10-09 00:09:09.321578369 +0000 UTC m=+146.847217070" Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.322775 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.822755424 +0000 UTC m=+147.348394205 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.420138 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.420784 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:09.920764356 +0000 UTC m=+147.446403057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.429434 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-g8p2l"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.506164 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.511314 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.511395 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.518777 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.520667 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.522255 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.522572 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.022560541 +0000 UTC m=+147.548199232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.578484 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29332800-wtmbd"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.624191 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.627044 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.127026484 +0000 UTC m=+147.652665185 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.683328 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-g9tnq" podStartSLOduration=127.683311121 podStartE2EDuration="2m7.683311121s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:09.681619731 +0000 UTC m=+147.207258432" watchObservedRunningTime="2025-10-09 00:09:09.683311121 +0000 UTC m=+147.208949822" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.719606 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-tmpb8"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.720156 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-blbp4"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.725499 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.725903 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.225889781 +0000 UTC m=+147.751528482 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.736406 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.793379 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.800386 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.800438 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-q7x6z"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.800449 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.821256 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6hchn"] Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.826396 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.826741 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.326724647 +0000 UTC m=+147.852363348 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.884178 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6zdk4" podStartSLOduration=127.884158228 podStartE2EDuration="2m7.884158228s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:09.883368475 +0000 UTC m=+147.409007196" watchObservedRunningTime="2025-10-09 00:09:09.884158228 +0000 UTC m=+147.409796929" Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.928368 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:09 crc kubenswrapper[4810]: E1009 00:09:09.928694 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.428679495 +0000 UTC m=+147.954318196 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:09 crc kubenswrapper[4810]: I1009 00:09:09.984060 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" podStartSLOduration=127.984040885 podStartE2EDuration="2m7.984040885s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:09.982375256 +0000 UTC m=+147.508013967" watchObservedRunningTime="2025-10-09 00:09:09.984040885 +0000 UTC m=+147.509679596" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.014057 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-zgvxk"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.029327 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.029543 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.529489371 +0000 UTC m=+148.055128072 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.029787 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.030164 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.53015097 +0000 UTC m=+148.055789671 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.037138 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jp7tg" podStartSLOduration=128.037116717 podStartE2EDuration="2m8.037116717s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.034769157 +0000 UTC m=+147.560407868" watchObservedRunningTime="2025-10-09 00:09:10.037116717 +0000 UTC m=+147.562755428" Oct 09 00:09:10 crc kubenswrapper[4810]: W1009 00:09:10.096357 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a71c1e2_9003_4d7c_ace4_ce4d14b826fb.slice/crio-8cf01c5c3e6be462a2672aad87504210ae519284d2377f14eb70e975b12dc2f1 WatchSource:0}: Error finding container 8cf01c5c3e6be462a2672aad87504210ae519284d2377f14eb70e975b12dc2f1: Status 404 returned error can't find the container with id 8cf01c5c3e6be462a2672aad87504210ae519284d2377f14eb70e975b12dc2f1 Oct 09 00:09:10 crc kubenswrapper[4810]: W1009 00:09:10.121847 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf942d602_c4fc_47a2_82d5_abc83eb4472b.slice/crio-968ce98e5691a7febe86ec4d77875091729db8e44efd64dd976e7a4a498eff26 WatchSource:0}: Error finding container 968ce98e5691a7febe86ec4d77875091729db8e44efd64dd976e7a4a498eff26: Status 404 returned error can't find the container with id 968ce98e5691a7febe86ec4d77875091729db8e44efd64dd976e7a4a498eff26 Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.130500 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.130733 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.630704568 +0000 UTC m=+148.156343269 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.130999 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.131459 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.63144462 +0000 UTC m=+148.157083321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.231544 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.232007 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.731980097 +0000 UTC m=+148.257618798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.255404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-kvfcw" event={"ID":"bfc66780-664e-4251-af70-c8b690092170","Type":"ContainerStarted","Data":"d6ac9191e44a1584afc6dcf4c85955c5ee6eaa4ea22f7f94daa03b3e78dc7688"} Oct 09 00:09:10 crc kubenswrapper[4810]: W1009 00:09:10.259261 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod057eac7b_cf2e_4d17_9dfc_cfae85b2e26b.slice/crio-29bff938bf3ee79a16f3d416ce2947aebecf45741cbe1f4bb0c66df593ed6964 WatchSource:0}: Error finding container 29bff938bf3ee79a16f3d416ce2947aebecf45741cbe1f4bb0c66df593ed6964: Status 404 returned error can't find the container with id 29bff938bf3ee79a16f3d416ce2947aebecf45741cbe1f4bb0c66df593ed6964 Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.260416 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" event={"ID":"9129ece3-62c9-4ce5-b1ce-43dd2f55b036","Type":"ContainerStarted","Data":"b717c3759479e468f7c3f3a37710c906eb489fca700632e83c9bbf5e6d346433"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.260464 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" event={"ID":"9129ece3-62c9-4ce5-b1ce-43dd2f55b036","Type":"ContainerStarted","Data":"017e2ebd86f8745bcd537481d0e4d65eef608e0ab9e9f5d1590369d93bf97130"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.262670 4810 generic.go:334] "Generic (PLEG): container finished" podID="7c410054-569a-4d33-8e55-87f28ba661f6" containerID="6cd12a2949699259c70fe0595af59465de23b0a26d38f06097607fe279ef44f3" exitCode=0 Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.262736 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" event={"ID":"7c410054-569a-4d33-8e55-87f28ba661f6","Type":"ContainerDied","Data":"6cd12a2949699259c70fe0595af59465de23b0a26d38f06097607fe279ef44f3"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.284409 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" event={"ID":"06fdb80d-d467-44d7-9859-efcc2fff59e6","Type":"ContainerStarted","Data":"511fd3d7588f27bc1de3008b273181c55ff0f5628da1d4b8fda0ee63f7313ffc"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.295244 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" event={"ID":"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39","Type":"ContainerStarted","Data":"665b47935d7068efc6ec278b0d6dece2ca1489ce58a1fc2d3a1aed7d022f4587"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.297712 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-685td" 
event={"ID":"2b7db5ab-0ff2-457c-8daf-53c06748f04e","Type":"ContainerStarted","Data":"8d581ad9c3e3215b349acbea7d8631e214bfffc513627c734c41a7defe702148"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.298707 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.311970 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" event={"ID":"a9b072a5-a708-4f29-9aae-f52e98802f1c","Type":"ContainerStarted","Data":"fc69fc4248a40e9a53eb648bdadd2dbb6ce75540762fecad00145ce31d9aed2f"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.314696 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" event={"ID":"c8d4552a-7960-4504-bd01-aaa15082c8ab","Type":"ContainerStarted","Data":"a3f5c3e219cf8f768dd06eca7c40911e74c415eb48e868a369f9b996332e7fc8"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.314745 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" event={"ID":"c8d4552a-7960-4504-bd01-aaa15082c8ab","Type":"ContainerStarted","Data":"fcc26e4ba57af95c9404e4eb4243c7327bf2466c493b33807ac5f7cac8f05397"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.315890 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29332800-wtmbd" event={"ID":"788a06a3-b34e-460e-a981-67130389de67","Type":"ContainerStarted","Data":"da615b4c48b8727820296812a50f24b02a8dd91fb3c322cc06ff7edfd6d285ed"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.315912 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29332800-wtmbd" event={"ID":"788a06a3-b34e-460e-a981-67130389de67","Type":"ContainerStarted","Data":"61667b47c9fbc393fa5ba3ab02e86eadd474ab93eb47d10a4f027c665976f2e0"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.317924 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" event={"ID":"6779aaef-a11f-4301-91cb-bd26489f7408","Type":"ContainerStarted","Data":"8eae5d94b05e17a605f9ee7aa75f525ed8c5bdec178d9c31ca44ca6e692f2dd5"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.334321 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.337367 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.340368 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:10 crc kubenswrapper[4810]: 
E1009 00:09:10.349515 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.849488526 +0000 UTC m=+148.375127227 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.353225 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" event={"ID":"60ab3d72-24f7-490e-930e-1e60fdbfe74b","Type":"ContainerStarted","Data":"b6e6cd0914ac2161d64858a503652590716b4272065fb56fc5f7fc45b464bcff"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.361229 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-trv84" podStartSLOduration=128.361209604 podStartE2EDuration="2m8.361209604s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.321308822 +0000 UTC m=+147.846947523" watchObservedRunningTime="2025-10-09 00:09:10.361209604 +0000 UTC m=+147.886848295" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.385884 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" event={"ID":"82a04500-5006-4149-a4db-1982b49a1fcd","Type":"ContainerStarted","Data":"5ea1d33092c6c2f38c12257f3d3f672e6411716419858d524b5382d2866f0a58"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.416175 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" event={"ID":"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed","Type":"ContainerStarted","Data":"622babee73a8521193cfc5f7ad117261f471bc2941415b1b88a07cc5b3de4c2a"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.416213 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" event={"ID":"d4af9ab8-8625-4c14-a0af-a566bb0ad0ed","Type":"ContainerStarted","Data":"d86fa808729c57ba5113696ff4efddeb79cd2a61bec1e8ec0e68a9617400df86"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.422508 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" event={"ID":"75eef79f-6ade-40d4-8bf9-768a5fe06edc","Type":"ContainerStarted","Data":"2abd587fd50ff2131948654a1437a714154bdb51f8d2e2d0918efaac56a8d1f1"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.431287 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" event={"ID":"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf","Type":"ContainerStarted","Data":"c37b6d47886797dace73ab065ec78fb86bced5b13049f658a03c5a1acbd02da7"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.432811 4810 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" event={"ID":"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb","Type":"ContainerStarted","Data":"8cf01c5c3e6be462a2672aad87504210ae519284d2377f14eb70e975b12dc2f1"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.434126 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" event={"ID":"41b0a7b8-e506-4c0b-a36c-6103481cc6e1","Type":"ContainerStarted","Data":"ba4d4b35239274bd5c14dc447bb31050088692982ac97cec44ed6690e41a5e7b"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.435980 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.436152 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.936115322 +0000 UTC m=+148.461754023 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.436339 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.437150 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:10.937141792 +0000 UTC m=+148.462780493 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.446404 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" event={"ID":"690a8646-257c-49be-b693-e151a1bab532","Type":"ContainerStarted","Data":"175818c56cc0ae4a0a5d51ceebc1b9f83236d0dbe9d767506107c6608a6f5fec"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.460928 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" event={"ID":"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b","Type":"ContainerStarted","Data":"4b8f8194b58e5c8cdb3556219098fdbf94ff6b88bc5b01f143ba190b002b8d78"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.462145 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q7x6z" event={"ID":"f942d602-c4fc-47a2-82d5-abc83eb4472b","Type":"ContainerStarted","Data":"968ce98e5691a7febe86ec4d77875091729db8e44efd64dd976e7a4a498eff26"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.487694 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" event={"ID":"76be45bb-06fd-4e17-859a-8522d12fa162","Type":"ContainerStarted","Data":"fc95c5ae16b00fc30de564a8dd07f880791b0cfba5589cba82e16e69a749f33d"} Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.513132 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.542199 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.545907 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.045865521 +0000 UTC m=+148.571504222 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.567157 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:10 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:10 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:10 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.567208 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.645743 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.646156 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.146143161 +0000 UTC m=+148.671781862 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.664135 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.664199 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dhpq4"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.682349 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-685td" podStartSLOduration=128.682335873 podStartE2EDuration="2m8.682335873s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.680201029 +0000 UTC m=+148.205839730" watchObservedRunningTime="2025-10-09 00:09:10.682335873 +0000 UTC m=+148.207974564" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.716969 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-w5fvk"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.731483 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.744817 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.746916 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.747017 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.246996587 +0000 UTC m=+148.772635288 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.747408 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.747787 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.247779511 +0000 UTC m=+148.773418212 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.757053 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.770372 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.771903 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-2wxs8" podStartSLOduration=128.771888194 podStartE2EDuration="2m8.771888194s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.726323115 +0000 UTC m=+148.251961826" watchObservedRunningTime="2025-10-09 00:09:10.771888194 +0000 UTC m=+148.297526895" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.811411 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" podStartSLOduration=128.811396974 podStartE2EDuration="2m8.811396974s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.809307903 +0000 UTC m=+148.334946604" watchObservedRunningTime="2025-10-09 00:09:10.811396974 +0000 UTC m=+148.337035665" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.813570 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg"] Oct 09 00:09:10 crc kubenswrapper[4810]: 
I1009 00:09:10.849147 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.850230 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.350215054 +0000 UTC m=+148.875853755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.853565 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gwvst"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.866412 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gf69h" podStartSLOduration=128.866390153 podStartE2EDuration="2m8.866390153s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.85108587 +0000 UTC m=+148.376724581" watchObservedRunningTime="2025-10-09 00:09:10.866390153 +0000 UTC m=+148.392028854" Oct 09 00:09:10 crc kubenswrapper[4810]: W1009 00:09:10.866698 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1884cc58_91e6_45ba_b5f0_061c85b25798.slice/crio-50111bf465d3b78447f6791dfb6d2a604b5dc348932603d0a586a4b61f2b5403 WatchSource:0}: Error finding container 50111bf465d3b78447f6791dfb6d2a604b5dc348932603d0a586a4b61f2b5403: Status 404 returned error can't find the container with id 50111bf465d3b78447f6791dfb6d2a604b5dc348932603d0a586a4b61f2b5403 Oct 09 00:09:10 crc kubenswrapper[4810]: W1009 00:09:10.871348 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8e183dc_1f17_4b0f_a177_e4a17569b307.slice/crio-fa9f33367e5030ad0d4c5492120b0e32026cb3108c59c37dd6c9fd17dbe1b31a WatchSource:0}: Error finding container fa9f33367e5030ad0d4c5492120b0e32026cb3108c59c37dd6c9fd17dbe1b31a: Status 404 returned error can't find the container with id fa9f33367e5030ad0d4c5492120b0e32026cb3108c59c37dd6c9fd17dbe1b31a Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.895613 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.925214 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-kvfcw" podStartSLOduration=5.925191854 podStartE2EDuration="5.925191854s" 
podCreationTimestamp="2025-10-09 00:09:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.896043091 +0000 UTC m=+148.421681792" watchObservedRunningTime="2025-10-09 00:09:10.925191854 +0000 UTC m=+148.450830565" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.929335 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-z2zxv"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.931772 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt"] Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.951669 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:10 crc kubenswrapper[4810]: E1009 00:09:10.952506 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.452470352 +0000 UTC m=+148.978109053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.954248 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29332800-wtmbd" podStartSLOduration=128.954234474 podStartE2EDuration="2m8.954234474s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.946717781 +0000 UTC m=+148.472356482" watchObservedRunningTime="2025-10-09 00:09:10.954234474 +0000 UTC m=+148.479873175" Oct 09 00:09:10 crc kubenswrapper[4810]: I1009 00:09:10.978399 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" podStartSLOduration=128.978381749 podStartE2EDuration="2m8.978381749s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:10.977245415 +0000 UTC m=+148.502884116" watchObservedRunningTime="2025-10-09 00:09:10.978381749 +0000 UTC m=+148.504020440" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.054000 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 
00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.054142 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.554115732 +0000 UTC m=+149.079754433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.054258 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.054553 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.554545884 +0000 UTC m=+149.080184585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.156359 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.156893 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.656876515 +0000 UTC m=+149.182515216 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.156911 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.156934 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.156968 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.157008 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.157028 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.160512 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.660496892 +0000 UTC m=+149.186135593 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.163242 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.163916 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:09:11 crc kubenswrapper[4810]: W1009 00:09:11.165978 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d279934_ea93_46eb_9f1a_cfc03bb52ac1.slice/crio-a97c2ec7747af240f30722657c7d980e9552826104c5ab44ec090bada1e6fb85 WatchSource:0}: Error finding container a97c2ec7747af240f30722657c7d980e9552826104c5ab44ec090bada1e6fb85: Status 404 returned error can't find the container with id a97c2ec7747af240f30722657c7d980e9552826104c5ab44ec090bada1e6fb85 Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.166457 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.201303 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.263416 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.283994 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.783960578 +0000 UTC m=+149.309599269 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.284141 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.284859 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.293328 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.387367 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.387722 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.88770962 +0000 UTC m=+149.413348321 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.488527 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.488622 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.988601058 +0000 UTC m=+149.514239759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.488876 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.489427 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:11.989407831 +0000 UTC m=+149.515046532 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.496732 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" event={"ID":"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb","Type":"ContainerStarted","Data":"e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.505982 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.506324 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.506353 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.516009 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:11 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:11 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:11 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.516070 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 
00:09:11.524016 4810 patch_prober.go:28] interesting pod/apiserver-76f77b778f-ch5jb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]log ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]etcd ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/generic-apiserver-start-informers ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/max-in-flight-filter ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 09 00:09:11 crc kubenswrapper[4810]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/project.openshift.io-projectcache ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/openshift.io-startinformers ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 09 00:09:11 crc kubenswrapper[4810]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 09 00:09:11 crc kubenswrapper[4810]: livez check failed Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.524090 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" podUID="7d4d7acd-916e-45d1-8d4d-990c386d806d" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.525192 4810 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rvkhn container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.525241 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" podUID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.527282 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" podStartSLOduration=128.527268563 podStartE2EDuration="2m8.527268563s" podCreationTimestamp="2025-10-09 00:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.525460989 +0000 UTC m=+149.051099710" watchObservedRunningTime="2025-10-09 00:09:11.527268563 +0000 UTC m=+149.052907264" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.531193 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-zgvxk" 
event={"ID":"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b","Type":"ContainerStarted","Data":"d4eb8d5a550e5a1df78bee22aeba33edf536510e269e741f0909e077ff77eea4"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.531235 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-zgvxk" event={"ID":"057eac7b-cf2e-4d17-9dfc-cfae85b2e26b","Type":"ContainerStarted","Data":"29bff938bf3ee79a16f3d416ce2947aebecf45741cbe1f4bb0c66df593ed6964"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.533037 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" event={"ID":"62454395-5349-44a7-8f9e-4acfe7a271cd","Type":"ContainerStarted","Data":"f8c112826ded91a5e2a178e4e7974f5c50b527fb5c60f35a6564817731288faf"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.540586 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" event={"ID":"d7e6e1a2-50a6-4b46-a064-206b6ae1fa39","Type":"ContainerStarted","Data":"42f39999e2706ec5870dfc5ec8d7be668482f5a8b7c1a70aadc1c825bbbd9a23"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.557205 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-zgvxk" podStartSLOduration=5.557184728 podStartE2EDuration="5.557184728s" podCreationTimestamp="2025-10-09 00:09:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.55285104 +0000 UTC m=+149.078489741" watchObservedRunningTime="2025-10-09 00:09:11.557184728 +0000 UTC m=+149.082823429" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.584557 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" event={"ID":"6779aaef-a11f-4301-91cb-bd26489f7408","Type":"ContainerStarted","Data":"a6c367db5388d6defdffa9c042318ef8fe8d8d5c49610ae6ff6f19a30032d022"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.590357 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.590662 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.090647479 +0000 UTC m=+149.616286170 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.593239 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-s8k6h" podStartSLOduration=129.593222186 podStartE2EDuration="2m9.593222186s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.591266358 +0000 UTC m=+149.116905069" watchObservedRunningTime="2025-10-09 00:09:11.593222186 +0000 UTC m=+149.118860877" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.599415 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" event={"ID":"bb7dae35-5849-4f2f-88df-a5f815c4d2f5","Type":"ContainerStarted","Data":"5163c76242731bc180d1c58c8c79626b9074ad2c904ccc8caba3202b0c4ff3c8"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.603002 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q7x6z" event={"ID":"f942d602-c4fc-47a2-82d5-abc83eb4472b","Type":"ContainerStarted","Data":"70f3dae6b8a14579f120522d17e195ae494f1aaaa3d6291c38055d8a336e6fbc"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.611664 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" event={"ID":"9129ece3-62c9-4ce5-b1ce-43dd2f55b036","Type":"ContainerStarted","Data":"d7cdff1de8c1685205a79700754ffca697f1aab5107c78bce5973ea11bd2efad"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.625759 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" event={"ID":"400482aa-2212-4b4d-acba-af4c61fb24f7","Type":"ContainerStarted","Data":"f1b2486e4a2748ff2e6ce16c73f2f374c360625a5ae1cfacf0ddb5fb5f01b8b1"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.626099 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" event={"ID":"400482aa-2212-4b4d-acba-af4c61fb24f7","Type":"ContainerStarted","Data":"dee405b5a9a94fa31ae1a6399b6c9483c5c0cc3cbb61c0644f0f8b54ceb82ed3"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.641221 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4mpg7" podStartSLOduration=129.641199286 podStartE2EDuration="2m9.641199286s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.637078024 +0000 UTC m=+149.162716735" watchObservedRunningTime="2025-10-09 00:09:11.641199286 +0000 UTC m=+149.166838007" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.652134 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" event={"ID":"1884cc58-91e6-45ba-b5f0-061c85b25798","Type":"ContainerStarted","Data":"50111bf465d3b78447f6791dfb6d2a604b5dc348932603d0a586a4b61f2b5403"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.664521 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-qzjjp" podStartSLOduration=129.664501976 podStartE2EDuration="2m9.664501976s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.663484226 +0000 UTC m=+149.189122947" watchObservedRunningTime="2025-10-09 00:09:11.664501976 +0000 UTC m=+149.190140687" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.680687 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" event={"ID":"8f995ff5-64f5-41c4-aa59-0521d9cd2328","Type":"ContainerStarted","Data":"1f6d5f76e70a226c068c8aba71d63fa3209151d42f0e44a94dc11bef6caff1d2"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.687084 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" event={"ID":"75eef79f-6ade-40d4-8bf9-768a5fe06edc","Type":"ContainerStarted","Data":"1d064437959a24e1221eb6202262fb8afffb9697490d9045b59984f8234b755f"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.687804 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.689012 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" event={"ID":"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b","Type":"ContainerStarted","Data":"468518e18aa76f953de93d2d2b5095ff02754c7b6f4f267dfabfbb4dfc89a7fb"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.692988 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" podStartSLOduration=129.692971029 podStartE2EDuration="2m9.692971029s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.692345561 +0000 UTC m=+149.217984262" watchObservedRunningTime="2025-10-09 00:09:11.692971029 +0000 UTC m=+149.218609730" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.693584 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.696867 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.196853184 +0000 UTC m=+149.722491885 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.702673 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" event={"ID":"82a04500-5006-4149-a4db-1982b49a1fcd","Type":"ContainerStarted","Data":"98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.703500 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.715416 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" podStartSLOduration=129.715393933 podStartE2EDuration="2m9.715393933s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.715257999 +0000 UTC m=+149.240896700" watchObservedRunningTime="2025-10-09 00:09:11.715393933 +0000 UTC m=+149.241032654" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.717754 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" event={"ID":"e5bc3465-32cf-4253-8a62-d05b03903515","Type":"ContainerStarted","Data":"e01460ab6c41b9a921dc5e326175ce7740f98e6a7f7802424ee14a65a3035c89"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.723382 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" event={"ID":"4d279934-ea93-46eb-9f1a-cfc03bb52ac1","Type":"ContainerStarted","Data":"a97c2ec7747af240f30722657c7d980e9552826104c5ab44ec090bada1e6fb85"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.725406 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" event={"ID":"e238e120-5ce2-4690-87ca-a950ad8afa8c","Type":"ContainerStarted","Data":"d0235f9b971fcba5a5ffff6beb780dc360e6face173ac8384ab1498cbabb5955"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.727058 4810 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6hchn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.727092 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.727294 4810 patch_prober.go:28] interesting pod/console-operator-58897d9998-tmpb8 container/console-operator 
namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.727314 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" podUID="75eef79f-6ade-40d4-8bf9-768a5fe06edc" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.738140 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" podStartSLOduration=129.738124846 podStartE2EDuration="2m9.738124846s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.737951541 +0000 UTC m=+149.263590242" watchObservedRunningTime="2025-10-09 00:09:11.738124846 +0000 UTC m=+149.263763547" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.745694 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dhpq4" event={"ID":"2eb13d08-4b13-48c5-a5a9-84407effd402","Type":"ContainerStarted","Data":"69abdeb110f339b469d05eaf08ea1a9d715e8136ed46a9b709818ca079e57990"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.750733 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" event={"ID":"c82928f7-de15-44e5-a60d-2494d90cc554","Type":"ContainerStarted","Data":"adbaf1f2b8fc3b4838f765455f7005f6686b9f6820e8b72042bc6817726d811b"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.760205 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" event={"ID":"76be45bb-06fd-4e17-859a-8522d12fa162","Type":"ContainerStarted","Data":"4c169b67cbf1dc4ba59efa1be78db5b45ef743477c7c586cf94beae267621501"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.760242 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" event={"ID":"76be45bb-06fd-4e17-859a-8522d12fa162","Type":"ContainerStarted","Data":"d8b133695aafa458eecf2353353c441de8d04a8ebe2412a1c8f19ce93797f18e"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.764437 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" event={"ID":"a8e183dc-1f17-4b0f-a177-e4a17569b307","Type":"ContainerStarted","Data":"fa9f33367e5030ad0d4c5492120b0e32026cb3108c59c37dd6c9fd17dbe1b31a"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.770447 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" podStartSLOduration=128.770434093 podStartE2EDuration="2m8.770434093s" podCreationTimestamp="2025-10-09 00:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.769437254 +0000 UTC m=+149.295075965" watchObservedRunningTime="2025-10-09 00:09:11.770434093 +0000 UTC m=+149.296072794" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.784615 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" event={"ID":"60ab3d72-24f7-490e-930e-1e60fdbfe74b","Type":"ContainerStarted","Data":"12b4099ecc281b0dd11a675caff552ec4224afc9d86d078b4bf63fa9ed7fe804"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.794766 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.795747 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.295717192 +0000 UTC m=+149.821355893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.816911 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-rjmwx" podStartSLOduration=129.816894139 podStartE2EDuration="2m9.816894139s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.815555529 +0000 UTC m=+149.341194230" watchObservedRunningTime="2025-10-09 00:09:11.816894139 +0000 UTC m=+149.342532840" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.843616 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" event={"ID":"690a8646-257c-49be-b693-e151a1bab532","Type":"ContainerStarted","Data":"3bc9d8f2ef81d86431b82d64f639470058541c566e75b594c0786b6666ca8425"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.843659 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" event={"ID":"690a8646-257c-49be-b693-e151a1bab532","Type":"ContainerStarted","Data":"16fe4a009cd70c485b7cda0e1abebc634f3b5070529d20d869aedf1ec58637fb"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.869142 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" event={"ID":"681c7462-9aad-4dda-abb5-541f675d83a8","Type":"ContainerStarted","Data":"43878bd6b3ec15bb5e123618c776fa50d5f52feb9d6d03b70c86c1e51cc456a9"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.890063 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-5jswd" podStartSLOduration=129.890049815 podStartE2EDuration="2m9.890049815s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.88920998 +0000 UTC m=+149.414848681" watchObservedRunningTime="2025-10-09 00:09:11.890049815 +0000 UTC m=+149.415688516" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.890981 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dpdf2" podStartSLOduration=129.890975903 podStartE2EDuration="2m9.890975903s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.840082176 +0000 UTC m=+149.365720877" watchObservedRunningTime="2025-10-09 00:09:11.890975903 +0000 UTC m=+149.416614604" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.894758 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" event={"ID":"2e1db72d-c0c5-4abf-bfc2-42f5f187ccbf","Type":"ContainerStarted","Data":"2899ec96f5b4f6e23a1adfdc9b6637d35d1072b45fdebada7dcf006c08b73cb5"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.896891 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.897255 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.397243828 +0000 UTC m=+149.922882529 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.918328 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" podStartSLOduration=129.918314912 podStartE2EDuration="2m9.918314912s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.917022154 +0000 UTC m=+149.442660855" watchObservedRunningTime="2025-10-09 00:09:11.918314912 +0000 UTC m=+149.443953613" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.965872 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-thprd" event={"ID":"06f5797c-1640-413d-882d-957c10daea4c","Type":"ContainerStarted","Data":"1ee6cb7b0660a35ac000b654ff8a98c0c5589afd6ba327abd7a1f3c910db72cb"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.967925 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-mfjlm" podStartSLOduration=129.967907831 podStartE2EDuration="2m9.967907831s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:11.966839669 +0000 UTC m=+149.492478360" watchObservedRunningTime="2025-10-09 00:09:11.967907831 +0000 UTC m=+149.493546532" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.981988 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" event={"ID":"a9b072a5-a708-4f29-9aae-f52e98802f1c","Type":"ContainerStarted","Data":"d951b312cd5ed576de194a44d66c3d41d5cb41c2efb6e3254fd328e65ae54051"} Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.982033 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.982069 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.982427 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.982465 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:11 crc kubenswrapper[4810]: I1009 00:09:11.998227 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:11 crc kubenswrapper[4810]: E1009 00:09:11.998958 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.49894494 +0000 UTC m=+150.024583641 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.010363 4810 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lsgnf container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.25:6443/healthz\": dial tcp 10.217.0.25:6443: connect: connection refused" start-of-body= Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.010652 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" podUID="a9b072a5-a708-4f29-9aae-f52e98802f1c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.25:6443/healthz\": dial tcp 10.217.0.25:6443: connect: connection refused" Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.032544 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-thprd" podStartSLOduration=130.032517404 podStartE2EDuration="2m10.032517404s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:12.012157921 +0000 UTC m=+149.537796622" watchObservedRunningTime="2025-10-09 00:09:12.032517404 +0000 UTC m=+149.558156105" Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.037122 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-l8p8f" Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.078370 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" podStartSLOduration=130.078356891 podStartE2EDuration="2m10.078356891s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:12.075426605 +0000 UTC m=+149.601065306" watchObservedRunningTime="2025-10-09 00:09:12.078356891 +0000 UTC m=+149.603995612" Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.110171 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.115254 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.615239583 +0000 UTC m=+150.140878284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.213276 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.213794 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.713779301 +0000 UTC m=+150.239418002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.323811 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.324169 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.82415467 +0000 UTC m=+150.349793371 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.425072 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.425400 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:12.925383137 +0000 UTC m=+150.451021838 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.521043 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:12 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:12 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:12 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.521325 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.527609 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.527982 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.027967335 +0000 UTC m=+150.553606036 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.631425 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.631707 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.131692696 +0000 UTC m=+150.657331397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.733798 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.734683 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.234667806 +0000 UTC m=+150.760306507 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.834927 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.835201 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.335180602 +0000 UTC m=+150.860819303 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.935971 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:12 crc kubenswrapper[4810]: E1009 00:09:12.936328 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.436314287 +0000 UTC m=+150.961952988 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.986237 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dhpq4" event={"ID":"2eb13d08-4b13-48c5-a5a9-84407effd402","Type":"ContainerStarted","Data":"5a33d7baf407a50007c998193a3b316175acb456382e51b04d411ba72991613a"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.987748 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" event={"ID":"c82928f7-de15-44e5-a60d-2494d90cc554","Type":"ContainerStarted","Data":"1c0c064d8fe2fa27dbbccb76e429bc67b8abfecadd05c5c4b633d0d1bde5dd17"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.989531 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" event={"ID":"7c410054-569a-4d33-8e55-87f28ba661f6","Type":"ContainerStarted","Data":"13b4bbd44e25a1891dad62f6d8ea3afaf97393ba84e3cb1d9b91bdebb69a87d2"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.992548 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" event={"ID":"62454395-5349-44a7-8f9e-4acfe7a271cd","Type":"ContainerStarted","Data":"2ffa762c9df8951d5dee229b32b15b148e8053abacd838ac6946612147fc5c61"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.993844 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" event={"ID":"41b0a7b8-e506-4c0b-a36c-6103481cc6e1","Type":"ContainerStarted","Data":"57abc499fdbd1ad27b463285b384b386ba7fef5e268732929c49f5b4bfa00492"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.995059 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" event={"ID":"8f995ff5-64f5-41c4-aa59-0521d9cd2328","Type":"ContainerStarted","Data":"74c9001aa1f068c74668262bf31c08b2248547f0b1b40c044035fd74eedc5781"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.998122 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-pd9xd" event={"ID":"681c7462-9aad-4dda-abb5-541f675d83a8","Type":"ContainerStarted","Data":"7899406976ceac134f16232ffc189ccf90d713f63b41dc47c247ddacacc77553"} Oct 09 00:09:12 crc kubenswrapper[4810]: I1009 00:09:12.999652 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" event={"ID":"6779aaef-a11f-4301-91cb-bd26489f7408","Type":"ContainerStarted","Data":"623abd19079de908985fcdfc92cf249a85a0ce05050ed8bc1ca1b8931a7b786d"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.001229 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" event={"ID":"e5bc3465-32cf-4253-8a62-d05b03903515","Type":"ContainerStarted","Data":"ab7f1d1ef61b146af366c7037f9a65e96090df3eaba0e2799bc5ca080d2708b2"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 
00:09:13.001468 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.003321 4810 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-289v7 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.003382 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" podUID="e5bc3465-32cf-4253-8a62-d05b03903515" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.004237 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" event={"ID":"065c4541-9a8f-4e9b-95c8-621fc0c1ef9b","Type":"ContainerStarted","Data":"3b909cf6ae7a0782906b4cb0fab7d231e8ef0811c4c000c79217d0a0ab8bd68c"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.005964 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bf93183fd89e9ed0fde974c137b7a81104c6389f281d655e0c8a82ecb3705719"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.005988 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"719c16834c5dd55387f80114fd148bfec3c0d5a51d1af07248ee055ce5f9d4e7"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.006109 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.007585 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-q7x6z" event={"ID":"f942d602-c4fc-47a2-82d5-abc83eb4472b","Type":"ContainerStarted","Data":"c8402cc288508fea70ff06bf508967fcaf36930c8ab97207cffcdd035f99a942"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.007852 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.009424 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" event={"ID":"bb7dae35-5849-4f2f-88df-a5f815c4d2f5","Type":"ContainerStarted","Data":"40ec5af735b659c20ecc20da3c344fc3cff134564970cb7e65d9a3b948eb6929"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.009459 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" event={"ID":"bb7dae35-5849-4f2f-88df-a5f815c4d2f5","Type":"ContainerStarted","Data":"5fd9cf1b3c6efb164d7c16da2be1e574289fef8bc5d6f51339f24e0e45364dbf"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.009721 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:13 crc kubenswrapper[4810]: 
I1009 00:09:13.011156 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" event={"ID":"4d279934-ea93-46eb-9f1a-cfc03bb52ac1","Type":"ContainerStarted","Data":"44cfed9a19eca1348012bdc4331078c2f277cdd4950aae4567c2bb19590a35d8"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.011202 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" event={"ID":"4d279934-ea93-46eb-9f1a-cfc03bb52ac1","Type":"ContainerStarted","Data":"2ef6bb8bcf07f904120fb506f60fa864f8a1ffae9c121f4fc695a9502e7534b8"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.012781 4810 generic.go:334] "Generic (PLEG): container finished" podID="e238e120-5ce2-4690-87ca-a950ad8afa8c" containerID="425cbdd23671ef7c1b776559f8cb8d340f1ba25b1448c11f1b277109e4160182" exitCode=0 Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.012835 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" event={"ID":"e238e120-5ce2-4690-87ca-a950ad8afa8c","Type":"ContainerDied","Data":"425cbdd23671ef7c1b776559f8cb8d340f1ba25b1448c11f1b277109e4160182"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.014126 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"f2efb66951ced3ebf220e8b53de970df9a4d818430544a1fff43879b0a79a4da"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.014172 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"825e036ef34763ba08aada7b1319fe795602bddedb7b3f8d81ff9d2e18ea7532"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.015667 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"10fdbc17864d7da9eeab5b0d0f0123e1b5f731fa3d5c08b8bca51b30fff90937"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.015716 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3aa78beef0b7f6acf70195872e2cb752b205d3599dc6bd2822465d39a98e0536"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.017004 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" event={"ID":"a8e183dc-1f17-4b0f-a177-e4a17569b307","Type":"ContainerStarted","Data":"95ab82b7eb5877ebe03b5b9f0367adbb6fa8dacdd3dba375e771c84874100d8a"} Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.017146 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.019079 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" event={"ID":"1884cc58-91e6-45ba-b5f0-061c85b25798","Type":"ContainerStarted","Data":"33eea64543b5eb31c0f825657887496f063ab508ea04fed9e1219e1368494eae"} Oct 09 00:09:13 crc kubenswrapper[4810]: 
I1009 00:09:13.019385 4810 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-tsnwg container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.019431 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" podUID="a8e183dc-1f17-4b0f-a177-e4a17569b307" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.019993 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.020382 4810 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6hchn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.020414 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.020628 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.037808 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.037924 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.537907585 +0000 UTC m=+151.063546286 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.069973 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.072488 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dhpq4" podStartSLOduration=131.072449548 podStartE2EDuration="2m11.072449548s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.068863562 +0000 UTC m=+150.594502273" watchObservedRunningTime="2025-10-09 00:09:13.072449548 +0000 UTC m=+150.598088249" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.075089 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.083756 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.583726412 +0000 UTC m=+151.109365113 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.147080 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-tmpb8" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.156114 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" podStartSLOduration=130.156093825 podStartE2EDuration="2m10.156093825s" podCreationTimestamp="2025-10-09 00:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.152987073 +0000 UTC m=+150.678625794" watchObservedRunningTime="2025-10-09 00:09:13.156093825 +0000 UTC m=+150.681732526" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.156438 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" podStartSLOduration=131.156432185 podStartE2EDuration="2m11.156432185s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.120902813 +0000 UTC m=+150.646541514" watchObservedRunningTime="2025-10-09 00:09:13.156432185 +0000 UTC m=+150.682070896" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.177398 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.177911 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.67789404 +0000 UTC m=+151.203532741 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.200537 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-z2zxv" podStartSLOduration=131.2005171 podStartE2EDuration="2m11.2005171s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.199194511 +0000 UTC m=+150.724833212" watchObservedRunningTime="2025-10-09 00:09:13.2005171 +0000 UTC m=+150.726155811" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.218858 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-blbp4" podStartSLOduration=131.218839673 podStartE2EDuration="2m11.218839673s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.21670357 +0000 UTC m=+150.742342271" watchObservedRunningTime="2025-10-09 00:09:13.218839673 +0000 UTC m=+150.744478374" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.244783 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-tjgjx" podStartSLOduration=130.244765291 podStartE2EDuration="2m10.244765291s" podCreationTimestamp="2025-10-09 00:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.244310407 +0000 UTC m=+150.769949108" watchObservedRunningTime="2025-10-09 00:09:13.244765291 +0000 UTC m=+150.770403992" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.281503 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.282009 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.781998593 +0000 UTC m=+151.307637294 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.289103 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-w5fvk" podStartSLOduration=130.289087013 podStartE2EDuration="2m10.289087013s" podCreationTimestamp="2025-10-09 00:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.287726073 +0000 UTC m=+150.813364764" watchObservedRunningTime="2025-10-09 00:09:13.289087013 +0000 UTC m=+150.814725724" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.310087 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.310463 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.332082 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-q7x6z" podStartSLOduration=8.332063486 podStartE2EDuration="8.332063486s" podCreationTimestamp="2025-10-09 00:09:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.327509051 +0000 UTC m=+150.853147742" watchObservedRunningTime="2025-10-09 00:09:13.332063486 +0000 UTC m=+150.857702187" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.382279 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.382701 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.882686305 +0000 UTC m=+151.408325006 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.383575 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" podStartSLOduration=131.383558531 podStartE2EDuration="2m11.383558531s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.38287357 +0000 UTC m=+150.908512281" watchObservedRunningTime="2025-10-09 00:09:13.383558531 +0000 UTC m=+150.909197232" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.415228 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ssmlk" podStartSLOduration=131.415211948 podStartE2EDuration="2m11.415211948s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.412502468 +0000 UTC m=+150.938141169" watchObservedRunningTime="2025-10-09 00:09:13.415211948 +0000 UTC m=+150.940850669" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.417539 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wtglp"] Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.419279 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.422396 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.437409 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wtglp"] Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.484037 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-catalog-content\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.484079 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-utilities\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.484148 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbljd\" (UniqueName: \"kubernetes.io/projected/6191974f-e4e8-495e-a572-a264ecafce7d-kube-api-access-qbljd\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.484250 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.484536 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:13.984520719 +0000 UTC m=+151.510159420 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.496680 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7c4pt" podStartSLOduration=131.496661149 podStartE2EDuration="2m11.496661149s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:13.494229387 +0000 UTC m=+151.019868098" watchObservedRunningTime="2025-10-09 00:09:13.496661149 +0000 UTC m=+151.022299870" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.514780 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:13 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:13 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:13 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.514813 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.514852 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.584790 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.585099 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-catalog-content\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.585120 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-utilities\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.585147 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbljd\" (UniqueName: 
\"kubernetes.io/projected/6191974f-e4e8-495e-a572-a264ecafce7d-kube-api-access-qbljd\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.585486 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.085468998 +0000 UTC m=+151.611107700 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.585923 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-catalog-content\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.586200 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-utilities\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.639462 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jd56q"] Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.640758 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbljd\" (UniqueName: \"kubernetes.io/projected/6191974f-e4e8-495e-a572-a264ecafce7d-kube-api-access-qbljd\") pod \"community-operators-wtglp\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.641843 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.649149 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jd56q"] Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.654092 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.686416 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btkrq\" (UniqueName: \"kubernetes.io/projected/37593966-3644-43b1-98f8-1b37ac87161d-kube-api-access-btkrq\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.686487 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-utilities\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.686521 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.686556 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-catalog-content\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.686934 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.186923043 +0000 UTC m=+151.712561744 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.732397 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.803192 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.803671 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btkrq\" (UniqueName: \"kubernetes.io/projected/37593966-3644-43b1-98f8-1b37ac87161d-kube-api-access-btkrq\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.803725 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-utilities\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.803768 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-catalog-content\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.806525 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-catalog-content\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.806656 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.306634428 +0000 UTC m=+151.832273129 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.806899 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-utilities\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.807980 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h9xzp"] Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.811152 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.842030 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h9xzp"] Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.885066 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btkrq\" (UniqueName: \"kubernetes.io/projected/37593966-3644-43b1-98f8-1b37ac87161d-kube-api-access-btkrq\") pod \"certified-operators-jd56q\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.904518 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjrpl\" (UniqueName: \"kubernetes.io/projected/4c20606c-ba53-4b7a-8eac-e663fdc2a550-kube-api-access-gjrpl\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.904556 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-catalog-content\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.904599 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:13 crc kubenswrapper[4810]: I1009 00:09:13.904657 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-utilities\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:13 crc kubenswrapper[4810]: E1009 00:09:13.904958 4810 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.404946899 +0000 UTC m=+151.930585600 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.001216 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.003304 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h6k7x"] Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.004226 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.005672 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.005942 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-utilities\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.005989 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjrpl\" (UniqueName: \"kubernetes.io/projected/4c20606c-ba53-4b7a-8eac-e663fdc2a550-kube-api-access-gjrpl\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.006008 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-catalog-content\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.006430 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-catalog-content\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.006504 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.506488626 +0000 UTC m=+152.032127327 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.006724 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-utilities\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.042019 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h6k7x"] Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.076734 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" event={"ID":"e238e120-5ce2-4690-87ca-a950ad8afa8c","Type":"ContainerStarted","Data":"565e7c2a6b89a72b0fd83dcd286b9d4f3ccaa237ded670e2e9d71d7632f12fb7"} Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.076773 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.076902 4810 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6hchn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.076946 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.089991 4810 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-289v7 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" start-of-body= Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.090038 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" podUID="e5bc3465-32cf-4253-8a62-d05b03903515" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": dial tcp 10.217.0.38:5443: connect: connection refused" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.090095 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjrpl\" (UniqueName: \"kubernetes.io/projected/4c20606c-ba53-4b7a-8eac-e663fdc2a550-kube-api-access-gjrpl\") pod \"community-operators-h9xzp\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " 
pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.093360 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-tsnwg" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.108078 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-utilities\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.108236 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-catalog-content\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.108510 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfqvl\" (UniqueName: \"kubernetes.io/projected/20d2c462-bd84-47ea-9b74-81aead40fcf1-kube-api-access-pfqvl\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.108743 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.123642 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.623627254 +0000 UTC m=+152.149265955 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.178764 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" podStartSLOduration=132.178747586 podStartE2EDuration="2m12.178747586s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:14.145059649 +0000 UTC m=+151.670698350" watchObservedRunningTime="2025-10-09 00:09:14.178747586 +0000 UTC m=+151.704386287" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.189485 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.214560 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.214920 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-utilities\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.215029 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-catalog-content\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.215143 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfqvl\" (UniqueName: \"kubernetes.io/projected/20d2c462-bd84-47ea-9b74-81aead40fcf1-kube-api-access-pfqvl\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.215883 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.715869426 +0000 UTC m=+152.241508127 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.216681 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-utilities\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.217206 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-catalog-content\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.266795 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfqvl\" (UniqueName: \"kubernetes.io/projected/20d2c462-bd84-47ea-9b74-81aead40fcf1-kube-api-access-pfqvl\") pod \"certified-operators-h6k7x\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.319329 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.320897 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.820882265 +0000 UTC m=+152.346520966 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.350623 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.421311 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.421653 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:14.921639889 +0000 UTC m=+152.447278590 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.449543 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.516704 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:14 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:14 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:14 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.516975 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.524442 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.538915 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.038890761 +0000 UTC m=+152.564529472 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.579358 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jd56q"] Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.625572 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.626032 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.125807495 +0000 UTC m=+152.651446196 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.685852 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wtglp"] Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.727491 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.727805 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.227794605 +0000 UTC m=+152.753433306 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.811002 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h9xzp"] Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.829101 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.829412 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.329379823 +0000 UTC m=+152.855018514 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.850526 4810 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 09 00:09:14 crc kubenswrapper[4810]: I1009 00:09:14.931779 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:14 crc kubenswrapper[4810]: E1009 00:09:14.932110 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.432098134 +0000 UTC m=+152.957736835 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.032942 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:15 crc kubenswrapper[4810]: E1009 00:09:15.033167 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.533142027 +0000 UTC m=+153.058780728 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.098343 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" event={"ID":"41b0a7b8-e506-4c0b-a36c-6103481cc6e1","Type":"ContainerStarted","Data":"1485fd09b5c42e54d354412c91463ee7b71d9aea24645db4db2a3171bd3ae501"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.098386 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" event={"ID":"41b0a7b8-e506-4c0b-a36c-6103481cc6e1","Type":"ContainerStarted","Data":"a203d7edaf0360a64a4065b9df5d22b0f236aa89c3aef526eb9bf6a2490e49bc"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.100143 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerStarted","Data":"16d78f21a593636009749c61a0d2ea624aaf2fce09e68a70b353e6aaf1010225"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.104184 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h6k7x"] Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.108017 4810 generic.go:334] "Generic (PLEG): container finished" podID="6191974f-e4e8-495e-a572-a264ecafce7d" containerID="4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10" exitCode=0 Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.108103 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wtglp" event={"ID":"6191974f-e4e8-495e-a572-a264ecafce7d","Type":"ContainerDied","Data":"4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.108138 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-wtglp" event={"ID":"6191974f-e4e8-495e-a572-a264ecafce7d","Type":"ContainerStarted","Data":"bdfc9c8171bb4c866ead0486215024cc39bb798a84eed991d669a1d42b799830"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.110726 4810 generic.go:334] "Generic (PLEG): container finished" podID="37593966-3644-43b1-98f8-1b37ac87161d" containerID="7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e" exitCode=0 Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.110836 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd56q" event={"ID":"37593966-3644-43b1-98f8-1b37ac87161d","Type":"ContainerDied","Data":"7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.110868 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd56q" event={"ID":"37593966-3644-43b1-98f8-1b37ac87161d","Type":"ContainerStarted","Data":"300d57a343d23f2acaef7239a254210ce091aa985a6b5711e99a6b05227bb366"} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.111843 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.127376 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-55r74" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.134944 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: E1009 00:09:15.135413 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.635394464 +0000 UTC m=+153.161033165 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: W1009 00:09:15.136893 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20d2c462_bd84_47ea_9b74_81aead40fcf1.slice/crio-a27550e10873ecc20def4859052274d8b70bc1fdc91920a65b9632dc9b6d821f WatchSource:0}: Error finding container a27550e10873ecc20def4859052274d8b70bc1fdc91920a65b9632dc9b6d821f: Status 404 returned error can't find the container with id a27550e10873ecc20def4859052274d8b70bc1fdc91920a65b9632dc9b6d821f Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.235998 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:15 crc kubenswrapper[4810]: E1009 00:09:15.237041 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.737015664 +0000 UTC m=+153.262654365 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.338357 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: E1009 00:09:15.338731 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.838715475 +0000 UTC m=+153.364354166 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.439594 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:15 crc kubenswrapper[4810]: E1009 00:09:15.439739 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.939717726 +0000 UTC m=+153.465356427 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.439853 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: E1009 00:09:15.440152 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 00:09:15.940145049 +0000 UTC m=+153.465783750 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vfwx7" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.450365 4810 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-09T00:09:14.85055376Z","Handler":null,"Name":""} Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.474278 4810 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.474329 4810 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.506271 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:15 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:15 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:15 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.506571 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.540497 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.561440 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.598929 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-44mqx"] Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.600094 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.603886 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.610949 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-44mqx"] Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.642556 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.642621 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9rvk\" (UniqueName: \"kubernetes.io/projected/948a6dfa-fe30-4f84-a43f-dd5163884f0a-kube-api-access-r9rvk\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.642695 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-catalog-content\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.642722 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-utilities\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.652732 4810 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.652772 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.693206 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-289v7" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.715471 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vfwx7\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.741020 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.746581 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-catalog-content\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.746632 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-utilities\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.746683 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9rvk\" (UniqueName: \"kubernetes.io/projected/948a6dfa-fe30-4f84-a43f-dd5163884f0a-kube-api-access-r9rvk\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.747194 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-catalog-content\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.747518 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-utilities\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.771652 4810 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-r9rvk\" (UniqueName: \"kubernetes.io/projected/948a6dfa-fe30-4f84-a43f-dd5163884f0a-kube-api-access-r9rvk\") pod \"redhat-marketplace-44mqx\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.912892 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.997442 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-shkgk"] Oct 09 00:09:15 crc kubenswrapper[4810]: I1009 00:09:15.998441 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.010095 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-shkgk"] Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.050561 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-catalog-content\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.050619 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx9nx\" (UniqueName: \"kubernetes.io/projected/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-kube-api-access-tx9nx\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.050653 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-utilities\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.064744 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vfwx7"] Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.144873 4810 generic.go:334] "Generic (PLEG): container finished" podID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerID="de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230" exitCode=0 Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.144965 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6k7x" event={"ID":"20d2c462-bd84-47ea-9b74-81aead40fcf1","Type":"ContainerDied","Data":"de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230"} Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.146177 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6k7x" event={"ID":"20d2c462-bd84-47ea-9b74-81aead40fcf1","Type":"ContainerStarted","Data":"a27550e10873ecc20def4859052274d8b70bc1fdc91920a65b9632dc9b6d821f"} Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.152173 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-catalog-content\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.152235 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx9nx\" (UniqueName: \"kubernetes.io/projected/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-kube-api-access-tx9nx\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.152271 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-utilities\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.152831 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-utilities\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.156677 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-catalog-content\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.167075 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" event={"ID":"a2bb4c7a-46ee-4294-ac9f-97a89488515d","Type":"ContainerStarted","Data":"4b35c8125defe2079b904c22bb1c9dd3ca3fefeebb9e874421da3a3294fc363c"} Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.170813 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" event={"ID":"41b0a7b8-e506-4c0b-a36c-6103481cc6e1","Type":"ContainerStarted","Data":"321e3a2d286ed3a9e0b2f1ab42350670ab128104ccd50e8cb1024f3db7c98218"} Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.174755 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx9nx\" (UniqueName: \"kubernetes.io/projected/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-kube-api-access-tx9nx\") pod \"redhat-marketplace-shkgk\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.179071 4810 generic.go:334] "Generic (PLEG): container finished" podID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerID="a61e0b061f0a1f78a0963134c7f82158f1634a70c5adf58259b7b6787c5f6e18" exitCode=0 Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.179957 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerDied","Data":"a61e0b061f0a1f78a0963134c7f82158f1634a70c5adf58259b7b6787c5f6e18"} Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.193200 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="hostpath-provisioner/csi-hostpathplugin-g8p2l" podStartSLOduration=11.193184887 podStartE2EDuration="11.193184887s" podCreationTimestamp="2025-10-09 00:09:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:16.192892849 +0000 UTC m=+153.718531570" watchObservedRunningTime="2025-10-09 00:09:16.193184887 +0000 UTC m=+153.718823588" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.325295 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.396353 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-44mqx"] Oct 09 00:09:16 crc kubenswrapper[4810]: W1009 00:09:16.401501 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod948a6dfa_fe30_4f84_a43f_dd5163884f0a.slice/crio-304a86e7c35bd1fb15a31f9b1c7128c361c37f50832c8ec42b087deb2f4d6e89 WatchSource:0}: Error finding container 304a86e7c35bd1fb15a31f9b1c7128c361c37f50832c8ec42b087deb2f4d6e89: Status 404 returned error can't find the container with id 304a86e7c35bd1fb15a31f9b1c7128c361c37f50832c8ec42b087deb2f4d6e89 Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.506493 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.506971 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:16 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:16 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:16 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.507727 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.512495 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-ch5jb" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.607144 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wtz45"] Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.608021 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.610513 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.662541 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wtz45"] Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.674335 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-catalog-content\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.674395 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlrpl\" (UniqueName: \"kubernetes.io/projected/357773dc-61ec-484f-908d-844f1105047b-kube-api-access-nlrpl\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.674437 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-utilities\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.739741 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-shkgk"] Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.776659 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-catalog-content\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.776760 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlrpl\" (UniqueName: \"kubernetes.io/projected/357773dc-61ec-484f-908d-844f1105047b-kube-api-access-nlrpl\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.776886 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-utilities\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.777394 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-catalog-content\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.777435 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-utilities\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.794681 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlrpl\" (UniqueName: \"kubernetes.io/projected/357773dc-61ec-484f-908d-844f1105047b-kube-api-access-nlrpl\") pod \"redhat-operators-wtz45\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: W1009 00:09:16.805441 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42fb0613_71d0_4fd2_ab0b_b14f08c8a8a3.slice/crio-e399a9d39848622f06042b965a3e6230390e616216a5eefb69a1dda1aa78a82f WatchSource:0}: Error finding container e399a9d39848622f06042b965a3e6230390e616216a5eefb69a1dda1aa78a82f: Status 404 returned error can't find the container with id e399a9d39848622f06042b965a3e6230390e616216a5eefb69a1dda1aa78a82f Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.930399 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.997748 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4p5qb"] Oct 09 00:09:16 crc kubenswrapper[4810]: I1009 00:09:16.998700 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.054620 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4p5qb"] Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.059811 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.061481 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.064542 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.064831 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.079230 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-catalog-content\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.079268 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvg2c\" (UniqueName: \"kubernetes.io/projected/a3843149-4468-40d3-b941-50317b090419-kube-api-access-vvg2c\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.079299 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-utilities\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.083932 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.179282 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wtz45"] Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180198 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180259 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-catalog-content\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180277 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180301 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvg2c\" (UniqueName: \"kubernetes.io/projected/a3843149-4468-40d3-b941-50317b090419-kube-api-access-vvg2c\") pod 
\"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180332 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-utilities\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180726 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-utilities\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.180922 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-catalog-content\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.198457 4810 generic.go:334] "Generic (PLEG): container finished" podID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerID="614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1" exitCode=0 Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.198744 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-shkgk" event={"ID":"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3","Type":"ContainerDied","Data":"614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1"} Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.198797 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-shkgk" event={"ID":"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3","Type":"ContainerStarted","Data":"e399a9d39848622f06042b965a3e6230390e616216a5eefb69a1dda1aa78a82f"} Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.201669 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" event={"ID":"a2bb4c7a-46ee-4294-ac9f-97a89488515d","Type":"ContainerStarted","Data":"c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160"} Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.201864 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvg2c\" (UniqueName: \"kubernetes.io/projected/a3843149-4468-40d3-b941-50317b090419-kube-api-access-vvg2c\") pod \"redhat-operators-4p5qb\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.202052 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:17 crc kubenswrapper[4810]: W1009 00:09:17.203779 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod357773dc_61ec_484f_908d_844f1105047b.slice/crio-524961216df4bbde837ea18b3f6d940b0586707d71b6b3de20c1c31d0a0ec22e WatchSource:0}: Error finding container 524961216df4bbde837ea18b3f6d940b0586707d71b6b3de20c1c31d0a0ec22e: Status 404 returned error can't find the container with 
id 524961216df4bbde837ea18b3f6d940b0586707d71b6b3de20c1c31d0a0ec22e Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.205622 4810 generic.go:334] "Generic (PLEG): container finished" podID="c82928f7-de15-44e5-a60d-2494d90cc554" containerID="1c0c064d8fe2fa27dbbccb76e429bc67b8abfecadd05c5c4b633d0d1bde5dd17" exitCode=0 Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.205678 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" event={"ID":"c82928f7-de15-44e5-a60d-2494d90cc554","Type":"ContainerDied","Data":"1c0c064d8fe2fa27dbbccb76e429bc67b8abfecadd05c5c4b633d0d1bde5dd17"} Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.207205 4810 generic.go:334] "Generic (PLEG): container finished" podID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerID="37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2" exitCode=0 Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.208430 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44mqx" event={"ID":"948a6dfa-fe30-4f84-a43f-dd5163884f0a","Type":"ContainerDied","Data":"37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2"} Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.208447 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44mqx" event={"ID":"948a6dfa-fe30-4f84-a43f-dd5163884f0a","Type":"ContainerStarted","Data":"304a86e7c35bd1fb15a31f9b1c7128c361c37f50832c8ec42b087deb2f4d6e89"} Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.256031 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" podStartSLOduration=135.256010529 podStartE2EDuration="2m15.256010529s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:17.248095364 +0000 UTC m=+154.773734075" watchObservedRunningTime="2025-10-09 00:09:17.256010529 +0000 UTC m=+154.781649230" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.281966 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.282375 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.283896 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.286004 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" 
path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.304500 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.385995 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.392085 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.507343 4810 patch_prober.go:28] interesting pod/router-default-5444994796-78fsr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 00:09:17 crc kubenswrapper[4810]: [-]has-synced failed: reason withheld Oct 09 00:09:17 crc kubenswrapper[4810]: [+]process-running ok Oct 09 00:09:17 crc kubenswrapper[4810]: healthz check failed Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.507403 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-78fsr" podUID="08622f6b-de6d-48bc-998d-435e09a52226" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.754869 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 09 00:09:17 crc kubenswrapper[4810]: I1009 00:09:17.836745 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4p5qb"] Oct 09 00:09:17 crc kubenswrapper[4810]: W1009 00:09:17.848031 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3843149_4468_40d3_b941_50317b090419.slice/crio-f535d05afcb0b3d2dc1a98ae1e38d4414fe0e5479ab3d5047558624a37b1e97b WatchSource:0}: Error finding container f535d05afcb0b3d2dc1a98ae1e38d4414fe0e5479ab3d5047558624a37b1e97b: Status 404 returned error can't find the container with id f535d05afcb0b3d2dc1a98ae1e38d4414fe0e5479ab3d5047558624a37b1e97b Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.018542 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gwvst" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.236306 4810 generic.go:334] "Generic (PLEG): container finished" podID="357773dc-61ec-484f-908d-844f1105047b" containerID="71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468" exitCode=0 Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.236613 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wtz45" event={"ID":"357773dc-61ec-484f-908d-844f1105047b","Type":"ContainerDied","Data":"71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468"} Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.236639 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wtz45" 
event={"ID":"357773dc-61ec-484f-908d-844f1105047b","Type":"ContainerStarted","Data":"524961216df4bbde837ea18b3f6d940b0586707d71b6b3de20c1c31d0a0ec22e"} Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.240137 4810 generic.go:334] "Generic (PLEG): container finished" podID="a3843149-4468-40d3-b941-50317b090419" containerID="29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae" exitCode=0 Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.240206 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p5qb" event={"ID":"a3843149-4468-40d3-b941-50317b090419","Type":"ContainerDied","Data":"29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae"} Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.240237 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p5qb" event={"ID":"a3843149-4468-40d3-b941-50317b090419","Type":"ContainerStarted","Data":"f535d05afcb0b3d2dc1a98ae1e38d4414fe0e5479ab3d5047558624a37b1e97b"} Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.252954 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"496c69cb-4cc1-4a5b-9b5b-084707fc11e6","Type":"ContainerStarted","Data":"2864ca0024dd29777800b4777ca4cce37681d9f5a4348f292a96a6e9be19a576"} Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.489623 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.489668 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.489680 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.489722 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.525111 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.529320 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.636297 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.726078 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82928f7-de15-44e5-a60d-2494d90cc554-secret-volume\") pod \"c82928f7-de15-44e5-a60d-2494d90cc554\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.726129 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcq65\" (UniqueName: \"kubernetes.io/projected/c82928f7-de15-44e5-a60d-2494d90cc554-kube-api-access-qcq65\") pod \"c82928f7-de15-44e5-a60d-2494d90cc554\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.726192 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82928f7-de15-44e5-a60d-2494d90cc554-config-volume\") pod \"c82928f7-de15-44e5-a60d-2494d90cc554\" (UID: \"c82928f7-de15-44e5-a60d-2494d90cc554\") " Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.727459 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82928f7-de15-44e5-a60d-2494d90cc554-config-volume" (OuterVolumeSpecName: "config-volume") pod "c82928f7-de15-44e5-a60d-2494d90cc554" (UID: "c82928f7-de15-44e5-a60d-2494d90cc554"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.731596 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c82928f7-de15-44e5-a60d-2494d90cc554-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c82928f7-de15-44e5-a60d-2494d90cc554" (UID: "c82928f7-de15-44e5-a60d-2494d90cc554"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.732353 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c82928f7-de15-44e5-a60d-2494d90cc554-kube-api-access-qcq65" (OuterVolumeSpecName: "kube-api-access-qcq65") pod "c82928f7-de15-44e5-a60d-2494d90cc554" (UID: "c82928f7-de15-44e5-a60d-2494d90cc554"). InnerVolumeSpecName "kube-api-access-qcq65". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.789781 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.827755 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c82928f7-de15-44e5-a60d-2494d90cc554-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.827791 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcq65\" (UniqueName: \"kubernetes.io/projected/c82928f7-de15-44e5-a60d-2494d90cc554-kube-api-access-qcq65\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.827801 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c82928f7-de15-44e5-a60d-2494d90cc554-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.901592 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.901650 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.904766 4810 patch_prober.go:28] interesting pod/console-f9d7485db-dhpq4 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Oct 09 00:09:18 crc kubenswrapper[4810]: I1009 00:09:18.905066 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dhpq4" podUID="2eb13d08-4b13-48c5-a5a9-84407effd402" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.304672 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" event={"ID":"c82928f7-de15-44e5-a60d-2494d90cc554","Type":"ContainerDied","Data":"adbaf1f2b8fc3b4838f765455f7005f6686b9f6820e8b72042bc6817726d811b"} Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.304733 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adbaf1f2b8fc3b4838f765455f7005f6686b9f6820e8b72042bc6817726d811b" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.304831 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332800-6m95d" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.309374 4810 generic.go:334] "Generic (PLEG): container finished" podID="496c69cb-4cc1-4a5b-9b5b-084707fc11e6" containerID="393e2c2e0ccb92f73328b9f06b88799c8200a6d0a587fdd98e072df1c7af1718" exitCode=0 Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.310446 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"496c69cb-4cc1-4a5b-9b5b-084707fc11e6","Type":"ContainerDied","Data":"393e2c2e0ccb92f73328b9f06b88799c8200a6d0a587fdd98e072df1c7af1718"} Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.315676 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-78fsr" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.570225 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 00:09:19 crc kubenswrapper[4810]: E1009 00:09:19.570923 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82928f7-de15-44e5-a60d-2494d90cc554" containerName="collect-profiles" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.570945 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82928f7-de15-44e5-a60d-2494d90cc554" containerName="collect-profiles" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.571191 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c82928f7-de15-44e5-a60d-2494d90cc554" containerName="collect-profiles" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.571803 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.572174 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.578921 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.579089 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.639715 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.639769 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.740868 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.740925 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.740995 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.761629 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:19 crc kubenswrapper[4810]: I1009 00:09:19.904153 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:21 crc kubenswrapper[4810]: I1009 00:09:21.184627 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:09:21 crc kubenswrapper[4810]: I1009 00:09:21.184949 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:09:23 crc kubenswrapper[4810]: I1009 00:09:23.859774 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-q7x6z" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.240122 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.246391 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f74b5f49-e104-4aa7-9472-14d1e706785c-metrics-certs\") pod \"network-metrics-daemon-xpz29\" (UID: \"f74b5f49-e104-4aa7-9472-14d1e706785c\") " pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.369862 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xpz29" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.491442 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.543030 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kube-api-access\") pod \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.543092 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kubelet-dir\") pod \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\" (UID: \"496c69cb-4cc1-4a5b-9b5b-084707fc11e6\") " Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.543502 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "496c69cb-4cc1-4a5b-9b5b-084707fc11e6" (UID: "496c69cb-4cc1-4a5b-9b5b-084707fc11e6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.548971 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496c69cb-4cc1-4a5b-9b5b-084707fc11e6" (UID: "496c69cb-4cc1-4a5b-9b5b-084707fc11e6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.644409 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:25 crc kubenswrapper[4810]: I1009 00:09:25.644449 4810 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/496c69cb-4cc1-4a5b-9b5b-084707fc11e6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:26 crc kubenswrapper[4810]: I1009 00:09:26.362451 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"496c69cb-4cc1-4a5b-9b5b-084707fc11e6","Type":"ContainerDied","Data":"2864ca0024dd29777800b4777ca4cce37681d9f5a4348f292a96a6e9be19a576"} Oct 09 00:09:26 crc kubenswrapper[4810]: I1009 00:09:26.362754 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2864ca0024dd29777800b4777ca4cce37681d9f5a4348f292a96a6e9be19a576" Oct 09 00:09:26 crc kubenswrapper[4810]: I1009 00:09:26.362655 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 00:09:27 crc kubenswrapper[4810]: I1009 00:09:27.563390 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-xpz29"] Oct 09 00:09:27 crc kubenswrapper[4810]: I1009 00:09:27.601756 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 00:09:27 crc kubenswrapper[4810]: W1009 00:09:27.606928 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podc91e079c_a647_4dbe_9c0b_4b3feff1b8dd.slice/crio-3a2bd0e4feda8cae2fb1916fd66207e3646bd06f1404ea15801c94fc2b2ffc9f WatchSource:0}: Error finding container 3a2bd0e4feda8cae2fb1916fd66207e3646bd06f1404ea15801c94fc2b2ffc9f: Status 404 returned error can't find the container with id 3a2bd0e4feda8cae2fb1916fd66207e3646bd06f1404ea15801c94fc2b2ffc9f Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.374987 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd","Type":"ContainerStarted","Data":"81c4e6251b094eb611088146064c23f8ec7ef8c5efe6bafdd086020a780fc36e"} Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.375267 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd","Type":"ContainerStarted","Data":"3a2bd0e4feda8cae2fb1916fd66207e3646bd06f1404ea15801c94fc2b2ffc9f"} Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.375856 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xpz29" event={"ID":"f74b5f49-e104-4aa7-9472-14d1e706785c","Type":"ContainerStarted","Data":"73b733e004df1258098fe882ad2583f5c75772a4923e43aeca1fbb2db5261776"} Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.489642 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.489766 4810 patch_prober.go:28] interesting pod/downloads-7954f5f757-685td container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.489784 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.489694 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-685td" podUID="2b7db5ab-0ff2-457c-8daf-53c06748f04e" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.906646 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:28 crc kubenswrapper[4810]: I1009 00:09:28.910708 4810 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dhpq4" Oct 09 00:09:29 crc kubenswrapper[4810]: I1009 00:09:29.384640 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xpz29" event={"ID":"f74b5f49-e104-4aa7-9472-14d1e706785c","Type":"ContainerStarted","Data":"22dbe1f77702a27d21482111097e14b95b3b7589e41fb7262c09647a28cad9bb"} Oct 09 00:09:30 crc kubenswrapper[4810]: I1009 00:09:30.388696 4810 generic.go:334] "Generic (PLEG): container finished" podID="c91e079c-a647-4dbe-9c0b-4b3feff1b8dd" containerID="81c4e6251b094eb611088146064c23f8ec7ef8c5efe6bafdd086020a780fc36e" exitCode=0 Oct 09 00:09:30 crc kubenswrapper[4810]: I1009 00:09:30.388742 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd","Type":"ContainerDied","Data":"81c4e6251b094eb611088146064c23f8ec7ef8c5efe6bafdd086020a780fc36e"} Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.032324 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.187165 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kubelet-dir\") pod \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.187304 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kube-api-access\") pod \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\" (UID: \"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd\") " Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.187331 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c91e079c-a647-4dbe-9c0b-4b3feff1b8dd" (UID: "c91e079c-a647-4dbe-9c0b-4b3feff1b8dd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.187715 4810 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.196360 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c91e079c-a647-4dbe-9c0b-4b3feff1b8dd" (UID: "c91e079c-a647-4dbe-9c0b-4b3feff1b8dd"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.289651 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c91e079c-a647-4dbe-9c0b-4b3feff1b8dd-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.401860 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"c91e079c-a647-4dbe-9c0b-4b3feff1b8dd","Type":"ContainerDied","Data":"3a2bd0e4feda8cae2fb1916fd66207e3646bd06f1404ea15801c94fc2b2ffc9f"} Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.401937 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a2bd0e4feda8cae2fb1916fd66207e3646bd06f1404ea15801c94fc2b2ffc9f" Oct 09 00:09:32 crc kubenswrapper[4810]: I1009 00:09:32.401943 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 00:09:35 crc kubenswrapper[4810]: I1009 00:09:35.748402 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:09:38 crc kubenswrapper[4810]: I1009 00:09:38.494173 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-685td" Oct 09 00:09:45 crc kubenswrapper[4810]: I1009 00:09:45.480528 4810 generic.go:334] "Generic (PLEG): container finished" podID="788a06a3-b34e-460e-a981-67130389de67" containerID="da615b4c48b8727820296812a50f24b02a8dd91fb3c322cc06ff7edfd6d285ed" exitCode=0 Oct 09 00:09:45 crc kubenswrapper[4810]: I1009 00:09:45.480639 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29332800-wtmbd" event={"ID":"788a06a3-b34e-460e-a981-67130389de67","Type":"ContainerDied","Data":"da615b4c48b8727820296812a50f24b02a8dd91fb3c322cc06ff7edfd6d285ed"} Oct 09 00:09:48 crc kubenswrapper[4810]: E1009 00:09:48.585779 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 09 00:09:48 crc kubenswrapper[4810]: E1009 00:09:48.585977 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qbljd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-wtglp_openshift-marketplace(6191974f-e4e8-495e-a572-a264ecafce7d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 00:09:48 crc kubenswrapper[4810]: E1009 00:09:48.587186 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-wtglp" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" Oct 09 00:09:48 crc kubenswrapper[4810]: E1009 00:09:48.953589 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 09 00:09:48 crc kubenswrapper[4810]: E1009 00:09:48.953770 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tx9nx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-shkgk_openshift-marketplace(42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 00:09:48 crc kubenswrapper[4810]: E1009 00:09:48.955097 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-shkgk" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" Oct 09 00:09:49 crc kubenswrapper[4810]: I1009 00:09:49.104810 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qlkkp" Oct 09 00:09:50 crc kubenswrapper[4810]: E1009 00:09:50.248120 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-shkgk" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" Oct 09 00:09:50 crc kubenswrapper[4810]: E1009 00:09:50.248218 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-wtglp" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.291963 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.337719 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/788a06a3-b34e-460e-a981-67130389de67-serviceca\") pod \"788a06a3-b34e-460e-a981-67130389de67\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.337775 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k59h6\" (UniqueName: \"kubernetes.io/projected/788a06a3-b34e-460e-a981-67130389de67-kube-api-access-k59h6\") pod \"788a06a3-b34e-460e-a981-67130389de67\" (UID: \"788a06a3-b34e-460e-a981-67130389de67\") " Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.338457 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/788a06a3-b34e-460e-a981-67130389de67-serviceca" (OuterVolumeSpecName: "serviceca") pod "788a06a3-b34e-460e-a981-67130389de67" (UID: "788a06a3-b34e-460e-a981-67130389de67"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.343026 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788a06a3-b34e-460e-a981-67130389de67-kube-api-access-k59h6" (OuterVolumeSpecName: "kube-api-access-k59h6") pod "788a06a3-b34e-460e-a981-67130389de67" (UID: "788a06a3-b34e-460e-a981-67130389de67"). InnerVolumeSpecName "kube-api-access-k59h6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:09:50 crc kubenswrapper[4810]: E1009 00:09:50.350130 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 09 00:09:50 crc kubenswrapper[4810]: E1009 00:09:50.350266 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pfqvl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-h6k7x_openshift-marketplace(20d2c462-bd84-47ea-9b74-81aead40fcf1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 00:09:50 crc kubenswrapper[4810]: E1009 00:09:50.351435 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-h6k7x" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.439799 4810 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/788a06a3-b34e-460e-a981-67130389de67-serviceca\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.439870 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k59h6\" (UniqueName: \"kubernetes.io/projected/788a06a3-b34e-460e-a981-67130389de67-kube-api-access-k59h6\") on node \"crc\" DevicePath \"\"" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.513730 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29332800-wtmbd" Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.513717 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29332800-wtmbd" event={"ID":"788a06a3-b34e-460e-a981-67130389de67","Type":"ContainerDied","Data":"61667b47c9fbc393fa5ba3ab02e86eadd474ab93eb47d10a4f027c665976f2e0"} Oct 09 00:09:50 crc kubenswrapper[4810]: I1009 00:09:50.514063 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61667b47c9fbc393fa5ba3ab02e86eadd474ab93eb47d10a4f027c665976f2e0" Oct 09 00:09:51 crc kubenswrapper[4810]: I1009 00:09:51.184613 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:09:51 crc kubenswrapper[4810]: I1009 00:09:51.185060 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:09:51 crc kubenswrapper[4810]: I1009 00:09:51.291089 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 00:09:53 crc kubenswrapper[4810]: E1009 00:09:53.178533 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-h6k7x" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" Oct 09 00:09:53 crc kubenswrapper[4810]: E1009 00:09:53.253898 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 09 00:09:53 crc kubenswrapper[4810]: E1009 00:09:53.254092 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vvg2c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4p5qb_openshift-marketplace(a3843149-4468-40d3-b941-50317b090419): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 00:09:53 crc kubenswrapper[4810]: E1009 00:09:53.255326 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4p5qb" podUID="a3843149-4468-40d3-b941-50317b090419" Oct 09 00:09:53 crc kubenswrapper[4810]: I1009 00:09:53.529015 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xpz29" event={"ID":"f74b5f49-e104-4aa7-9472-14d1e706785c","Type":"ContainerStarted","Data":"ddec228486e9d94d2f61f70749fb72ee611406427b012452ab5bc79a0e88a892"} Oct 09 00:09:53 crc kubenswrapper[4810]: I1009 00:09:53.530892 4810 generic.go:334] "Generic (PLEG): container finished" podID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerID="5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3" exitCode=0 Oct 09 00:09:53 crc kubenswrapper[4810]: I1009 00:09:53.530888 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44mqx" event={"ID":"948a6dfa-fe30-4f84-a43f-dd5163884f0a","Type":"ContainerDied","Data":"5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3"} Oct 09 00:09:53 crc kubenswrapper[4810]: I1009 00:09:53.538303 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerStarted","Data":"e49a4de7d49e0e598df11f11a0ec38868a14fabb0c8d67ca7d7cbfc94ca630b6"} Oct 09 00:09:53 crc kubenswrapper[4810]: E1009 00:09:53.544283 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4p5qb" 
podUID="a3843149-4468-40d3-b941-50317b090419" Oct 09 00:09:53 crc kubenswrapper[4810]: I1009 00:09:53.553575 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-xpz29" podStartSLOduration=171.553556843 podStartE2EDuration="2m51.553556843s" podCreationTimestamp="2025-10-09 00:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:09:53.544931267 +0000 UTC m=+191.070570058" watchObservedRunningTime="2025-10-09 00:09:53.553556843 +0000 UTC m=+191.079195544" Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.548399 4810 generic.go:334] "Generic (PLEG): container finished" podID="37593966-3644-43b1-98f8-1b37ac87161d" containerID="8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f" exitCode=0 Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.548480 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd56q" event={"ID":"37593966-3644-43b1-98f8-1b37ac87161d","Type":"ContainerDied","Data":"8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f"} Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.559176 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44mqx" event={"ID":"948a6dfa-fe30-4f84-a43f-dd5163884f0a","Type":"ContainerStarted","Data":"6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88"} Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.561330 4810 generic.go:334] "Generic (PLEG): container finished" podID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerID="e49a4de7d49e0e598df11f11a0ec38868a14fabb0c8d67ca7d7cbfc94ca630b6" exitCode=0 Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.561401 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerDied","Data":"e49a4de7d49e0e598df11f11a0ec38868a14fabb0c8d67ca7d7cbfc94ca630b6"} Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.564588 4810 generic.go:334] "Generic (PLEG): container finished" podID="357773dc-61ec-484f-908d-844f1105047b" containerID="023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3" exitCode=0 Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.564683 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wtz45" event={"ID":"357773dc-61ec-484f-908d-844f1105047b","Type":"ContainerDied","Data":"023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3"} Oct 09 00:09:54 crc kubenswrapper[4810]: I1009 00:09:54.600604 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-44mqx" podStartSLOduration=2.705516159 podStartE2EDuration="39.600582057s" podCreationTimestamp="2025-10-09 00:09:15 +0000 UTC" firstStartedPulling="2025-10-09 00:09:17.209871792 +0000 UTC m=+154.735510483" lastFinishedPulling="2025-10-09 00:09:54.10493768 +0000 UTC m=+191.630576381" observedRunningTime="2025-10-09 00:09:54.597464195 +0000 UTC m=+192.123102906" watchObservedRunningTime="2025-10-09 00:09:54.600582057 +0000 UTC m=+192.126220748" Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.572333 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" 
event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerStarted","Data":"45dd60ee738a814e7e7bbb9b70b960d6d93fe55ce87864accdbdbb3e7d864c7d"} Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.580019 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wtz45" event={"ID":"357773dc-61ec-484f-908d-844f1105047b","Type":"ContainerStarted","Data":"67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55"} Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.587102 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd56q" event={"ID":"37593966-3644-43b1-98f8-1b37ac87161d","Type":"ContainerStarted","Data":"7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506"} Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.591068 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h9xzp" podStartSLOduration=3.650541361 podStartE2EDuration="42.591053897s" podCreationTimestamp="2025-10-09 00:09:13 +0000 UTC" firstStartedPulling="2025-10-09 00:09:16.18413889 +0000 UTC m=+153.709777591" lastFinishedPulling="2025-10-09 00:09:55.124651426 +0000 UTC m=+192.650290127" observedRunningTime="2025-10-09 00:09:55.590388207 +0000 UTC m=+193.116026908" watchObservedRunningTime="2025-10-09 00:09:55.591053897 +0000 UTC m=+193.116692598" Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.617776 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jd56q" podStartSLOduration=2.762840024 podStartE2EDuration="42.617754387s" podCreationTimestamp="2025-10-09 00:09:13 +0000 UTC" firstStartedPulling="2025-10-09 00:09:15.130545391 +0000 UTC m=+152.656184092" lastFinishedPulling="2025-10-09 00:09:54.985459744 +0000 UTC m=+192.511098455" observedRunningTime="2025-10-09 00:09:55.615061598 +0000 UTC m=+193.140700319" watchObservedRunningTime="2025-10-09 00:09:55.617754387 +0000 UTC m=+193.143393088" Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.633028 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wtz45" podStartSLOduration=2.670787672 podStartE2EDuration="39.633013179s" podCreationTimestamp="2025-10-09 00:09:16 +0000 UTC" firstStartedPulling="2025-10-09 00:09:18.23810823 +0000 UTC m=+155.763746931" lastFinishedPulling="2025-10-09 00:09:55.200333737 +0000 UTC m=+192.725972438" observedRunningTime="2025-10-09 00:09:55.630776893 +0000 UTC m=+193.156415604" watchObservedRunningTime="2025-10-09 00:09:55.633013179 +0000 UTC m=+193.158651880" Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.913372 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:55 crc kubenswrapper[4810]: I1009 00:09:55.913427 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:56 crc kubenswrapper[4810]: I1009 00:09:56.059698 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:09:56 crc kubenswrapper[4810]: I1009 00:09:56.930978 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:56 crc kubenswrapper[4810]: I1009 00:09:56.931032 4810 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:09:57 crc kubenswrapper[4810]: I1009 00:09:57.967225 4810 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wtz45" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="registry-server" probeResult="failure" output=< Oct 09 00:09:57 crc kubenswrapper[4810]: timeout: failed to connect service ":50051" within 1s Oct 09 00:09:57 crc kubenswrapper[4810]: > Oct 09 00:10:03 crc kubenswrapper[4810]: I1009 00:10:03.639563 4810 generic.go:334] "Generic (PLEG): container finished" podID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerID="04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f" exitCode=0 Oct 09 00:10:03 crc kubenswrapper[4810]: I1009 00:10:03.639772 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-shkgk" event={"ID":"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3","Type":"ContainerDied","Data":"04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f"} Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.002611 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.002687 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.072390 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.190686 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.190752 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.273752 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.693127 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:10:04 crc kubenswrapper[4810]: I1009 00:10:04.694139 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:10:05 crc kubenswrapper[4810]: I1009 00:10:05.654901 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-shkgk" event={"ID":"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3","Type":"ContainerStarted","Data":"00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201"} Oct 09 00:10:05 crc kubenswrapper[4810]: I1009 00:10:05.679722 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-shkgk" podStartSLOduration=3.359362538 podStartE2EDuration="50.679697864s" podCreationTimestamp="2025-10-09 00:09:15 +0000 UTC" firstStartedPulling="2025-10-09 00:09:17.200221726 +0000 UTC m=+154.725860427" lastFinishedPulling="2025-10-09 00:10:04.520557022 +0000 UTC m=+202.046195753" observedRunningTime="2025-10-09 00:10:05.674236962 +0000 UTC m=+203.199875683" watchObservedRunningTime="2025-10-09 00:10:05.679697864 +0000 UTC m=+203.205336595" 
Oct 09 00:10:05 crc kubenswrapper[4810]: I1009 00:10:05.975921 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:10:06 crc kubenswrapper[4810]: I1009 00:10:06.329032 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:10:06 crc kubenswrapper[4810]: I1009 00:10:06.329114 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:10:06 crc kubenswrapper[4810]: I1009 00:10:06.409930 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:10:06 crc kubenswrapper[4810]: I1009 00:10:06.693392 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h9xzp"] Oct 09 00:10:06 crc kubenswrapper[4810]: I1009 00:10:06.694317 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h9xzp" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="registry-server" containerID="cri-o://45dd60ee738a814e7e7bbb9b70b960d6d93fe55ce87864accdbdbb3e7d864c7d" gracePeriod=2 Oct 09 00:10:06 crc kubenswrapper[4810]: I1009 00:10:06.969942 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:10:07 crc kubenswrapper[4810]: I1009 00:10:07.007445 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:10:07 crc kubenswrapper[4810]: I1009 00:10:07.668525 4810 generic.go:334] "Generic (PLEG): container finished" podID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerID="329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e" exitCode=0 Oct 09 00:10:07 crc kubenswrapper[4810]: I1009 00:10:07.668583 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6k7x" event={"ID":"20d2c462-bd84-47ea-9b74-81aead40fcf1","Type":"ContainerDied","Data":"329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e"} Oct 09 00:10:07 crc kubenswrapper[4810]: I1009 00:10:07.671101 4810 generic.go:334] "Generic (PLEG): container finished" podID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerID="45dd60ee738a814e7e7bbb9b70b960d6d93fe55ce87864accdbdbb3e7d864c7d" exitCode=0 Oct 09 00:10:07 crc kubenswrapper[4810]: I1009 00:10:07.671689 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerDied","Data":"45dd60ee738a814e7e7bbb9b70b960d6d93fe55ce87864accdbdbb3e7d864c7d"} Oct 09 00:10:07 crc kubenswrapper[4810]: I1009 00:10:07.896675 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.036865 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-catalog-content\") pod \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.036977 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjrpl\" (UniqueName: \"kubernetes.io/projected/4c20606c-ba53-4b7a-8eac-e663fdc2a550-kube-api-access-gjrpl\") pod \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.037008 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-utilities\") pod \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\" (UID: \"4c20606c-ba53-4b7a-8eac-e663fdc2a550\") " Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.038034 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-utilities" (OuterVolumeSpecName: "utilities") pod "4c20606c-ba53-4b7a-8eac-e663fdc2a550" (UID: "4c20606c-ba53-4b7a-8eac-e663fdc2a550"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.042145 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c20606c-ba53-4b7a-8eac-e663fdc2a550-kube-api-access-gjrpl" (OuterVolumeSpecName: "kube-api-access-gjrpl") pod "4c20606c-ba53-4b7a-8eac-e663fdc2a550" (UID: "4c20606c-ba53-4b7a-8eac-e663fdc2a550"). InnerVolumeSpecName "kube-api-access-gjrpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.093516 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c20606c-ba53-4b7a-8eac-e663fdc2a550" (UID: "4c20606c-ba53-4b7a-8eac-e663fdc2a550"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.138795 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjrpl\" (UniqueName: \"kubernetes.io/projected/4c20606c-ba53-4b7a-8eac-e663fdc2a550-kube-api-access-gjrpl\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.138844 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.138857 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c20606c-ba53-4b7a-8eac-e663fdc2a550-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.680366 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h9xzp" event={"ID":"4c20606c-ba53-4b7a-8eac-e663fdc2a550","Type":"ContainerDied","Data":"16d78f21a593636009749c61a0d2ea624aaf2fce09e68a70b353e6aaf1010225"} Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.680456 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h9xzp" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.680786 4810 scope.go:117] "RemoveContainer" containerID="45dd60ee738a814e7e7bbb9b70b960d6d93fe55ce87864accdbdbb3e7d864c7d" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.700250 4810 scope.go:117] "RemoveContainer" containerID="e49a4de7d49e0e598df11f11a0ec38868a14fabb0c8d67ca7d7cbfc94ca630b6" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.720551 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h9xzp"] Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.720643 4810 scope.go:117] "RemoveContainer" containerID="a61e0b061f0a1f78a0963134c7f82158f1634a70c5adf58259b7b6787c5f6e18" Oct 09 00:10:08 crc kubenswrapper[4810]: I1009 00:10:08.722375 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h9xzp"] Oct 09 00:10:09 crc kubenswrapper[4810]: I1009 00:10:09.265776 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" path="/var/lib/kubelet/pods/4c20606c-ba53-4b7a-8eac-e663fdc2a550/volumes" Oct 09 00:10:10 crc kubenswrapper[4810]: I1009 00:10:10.699075 4810 generic.go:334] "Generic (PLEG): container finished" podID="6191974f-e4e8-495e-a572-a264ecafce7d" containerID="921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb" exitCode=0 Oct 09 00:10:10 crc kubenswrapper[4810]: I1009 00:10:10.699175 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wtglp" event={"ID":"6191974f-e4e8-495e-a572-a264ecafce7d","Type":"ContainerDied","Data":"921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb"} Oct 09 00:10:10 crc kubenswrapper[4810]: I1009 00:10:10.704650 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6k7x" event={"ID":"20d2c462-bd84-47ea-9b74-81aead40fcf1","Type":"ContainerStarted","Data":"c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997"} Oct 09 00:10:10 crc kubenswrapper[4810]: I1009 00:10:10.751479 4810 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/certified-operators-h6k7x" podStartSLOduration=3.804392958 podStartE2EDuration="57.751461277s" podCreationTimestamp="2025-10-09 00:09:13 +0000 UTC" firstStartedPulling="2025-10-09 00:09:16.174732661 +0000 UTC m=+153.700371352" lastFinishedPulling="2025-10-09 00:10:10.12180093 +0000 UTC m=+207.647439671" observedRunningTime="2025-10-09 00:10:10.748305305 +0000 UTC m=+208.273944056" watchObservedRunningTime="2025-10-09 00:10:10.751461277 +0000 UTC m=+208.277099998" Oct 09 00:10:11 crc kubenswrapper[4810]: I1009 00:10:11.711250 4810 generic.go:334] "Generic (PLEG): container finished" podID="a3843149-4468-40d3-b941-50317b090419" containerID="bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3" exitCode=0 Oct 09 00:10:11 crc kubenswrapper[4810]: I1009 00:10:11.711339 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p5qb" event={"ID":"a3843149-4468-40d3-b941-50317b090419","Type":"ContainerDied","Data":"bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3"} Oct 09 00:10:11 crc kubenswrapper[4810]: I1009 00:10:11.716242 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wtglp" event={"ID":"6191974f-e4e8-495e-a572-a264ecafce7d","Type":"ContainerStarted","Data":"2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530"} Oct 09 00:10:11 crc kubenswrapper[4810]: I1009 00:10:11.750429 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wtglp" podStartSLOduration=2.695783382 podStartE2EDuration="58.75040629s" podCreationTimestamp="2025-10-09 00:09:13 +0000 UTC" firstStartedPulling="2025-10-09 00:09:15.111481506 +0000 UTC m=+152.637120207" lastFinishedPulling="2025-10-09 00:10:11.166104424 +0000 UTC m=+208.691743115" observedRunningTime="2025-10-09 00:10:11.746511155 +0000 UTC m=+209.272149896" watchObservedRunningTime="2025-10-09 00:10:11.75040629 +0000 UTC m=+209.276044991" Oct 09 00:10:12 crc kubenswrapper[4810]: I1009 00:10:12.743320 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p5qb" event={"ID":"a3843149-4468-40d3-b941-50317b090419","Type":"ContainerStarted","Data":"275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba"} Oct 09 00:10:13 crc kubenswrapper[4810]: I1009 00:10:13.733167 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:10:13 crc kubenswrapper[4810]: I1009 00:10:13.733507 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:10:13 crc kubenswrapper[4810]: I1009 00:10:13.769644 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4p5qb" podStartSLOduration=3.834075725 podStartE2EDuration="57.769626917s" podCreationTimestamp="2025-10-09 00:09:16 +0000 UTC" firstStartedPulling="2025-10-09 00:09:18.246752876 +0000 UTC m=+155.772391577" lastFinishedPulling="2025-10-09 00:10:12.182304068 +0000 UTC m=+209.707942769" observedRunningTime="2025-10-09 00:10:13.769495124 +0000 UTC m=+211.295133845" watchObservedRunningTime="2025-10-09 00:10:13.769626917 +0000 UTC m=+211.295265628" Oct 09 00:10:13 crc kubenswrapper[4810]: I1009 00:10:13.813038 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wtglp" 
Oct 09 00:10:14 crc kubenswrapper[4810]: I1009 00:10:14.351634 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:10:14 crc kubenswrapper[4810]: I1009 00:10:14.352056 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:10:14 crc kubenswrapper[4810]: I1009 00:10:14.409102 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:10:16 crc kubenswrapper[4810]: I1009 00:10:16.367874 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:10:17 crc kubenswrapper[4810]: I1009 00:10:17.387394 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:10:17 crc kubenswrapper[4810]: I1009 00:10:17.387439 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:10:17 crc kubenswrapper[4810]: I1009 00:10:17.438918 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:10:17 crc kubenswrapper[4810]: I1009 00:10:17.807095 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.284218 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-shkgk"] Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.284746 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-shkgk" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="registry-server" containerID="cri-o://00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201" gracePeriod=2 Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.616195 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.678643 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx9nx\" (UniqueName: \"kubernetes.io/projected/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-kube-api-access-tx9nx\") pod \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.678698 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-catalog-content\") pod \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.678733 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-utilities\") pod \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\" (UID: \"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3\") " Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.679708 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-utilities" (OuterVolumeSpecName: "utilities") pod "42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" (UID: "42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.686218 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-kube-api-access-tx9nx" (OuterVolumeSpecName: "kube-api-access-tx9nx") pod "42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" (UID: "42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3"). InnerVolumeSpecName "kube-api-access-tx9nx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.701580 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" (UID: "42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779569 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx9nx\" (UniqueName: \"kubernetes.io/projected/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-kube-api-access-tx9nx\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779603 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779612 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779726 4810 generic.go:334] "Generic (PLEG): container finished" podID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerID="00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201" exitCode=0 Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779774 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-shkgk" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779771 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-shkgk" event={"ID":"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3","Type":"ContainerDied","Data":"00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201"} Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779916 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-shkgk" event={"ID":"42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3","Type":"ContainerDied","Data":"e399a9d39848622f06042b965a3e6230390e616216a5eefb69a1dda1aa78a82f"} Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.779942 4810 scope.go:117] "RemoveContainer" containerID="00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.794749 4810 scope.go:117] "RemoveContainer" containerID="04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.810865 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-shkgk"] Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.826001 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-shkgk"] Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.831211 4810 scope.go:117] "RemoveContainer" containerID="614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.843464 4810 scope.go:117] "RemoveContainer" containerID="00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201" Oct 09 00:10:19 crc kubenswrapper[4810]: E1009 00:10:19.843887 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201\": container with ID starting with 00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201 not found: ID does not exist" containerID="00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.843930 4810 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201"} err="failed to get container status \"00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201\": rpc error: code = NotFound desc = could not find container \"00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201\": container with ID starting with 00530e1b7a2eda029645f755ab0c268623f5a723d94ec4967b2c87e6ccdec201 not found: ID does not exist" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.843973 4810 scope.go:117] "RemoveContainer" containerID="04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f" Oct 09 00:10:19 crc kubenswrapper[4810]: E1009 00:10:19.844184 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f\": container with ID starting with 04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f not found: ID does not exist" containerID="04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.844226 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f"} err="failed to get container status \"04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f\": rpc error: code = NotFound desc = could not find container \"04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f\": container with ID starting with 04dd21e468c95072931069af3c9d261c1b07cf88df0d779fe0217ada464d3e9f not found: ID does not exist" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.844247 4810 scope.go:117] "RemoveContainer" containerID="614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1" Oct 09 00:10:19 crc kubenswrapper[4810]: E1009 00:10:19.844505 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1\": container with ID starting with 614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1 not found: ID does not exist" containerID="614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1" Oct 09 00:10:19 crc kubenswrapper[4810]: I1009 00:10:19.844535 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1"} err="failed to get container status \"614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1\": rpc error: code = NotFound desc = could not find container \"614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1\": container with ID starting with 614c286dde1549409e3baedbe5dc1d7f840414eaa7815032e5b767911cc63cb1 not found: ID does not exist" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.085961 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4p5qb"] Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.086197 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4p5qb" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="registry-server" containerID="cri-o://275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba" gracePeriod=2 Oct 09 00:10:21 
crc kubenswrapper[4810]: I1009 00:10:21.184655 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.185036 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.185099 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.185678 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93"} pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.185732 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93" gracePeriod=600 Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.263860 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" path="/var/lib/kubelet/pods/42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3/volumes" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.443705 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.500036 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-utilities\") pod \"a3843149-4468-40d3-b941-50317b090419\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.500120 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-catalog-content\") pod \"a3843149-4468-40d3-b941-50317b090419\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.500174 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvg2c\" (UniqueName: \"kubernetes.io/projected/a3843149-4468-40d3-b941-50317b090419-kube-api-access-vvg2c\") pod \"a3843149-4468-40d3-b941-50317b090419\" (UID: \"a3843149-4468-40d3-b941-50317b090419\") " Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.501528 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-utilities" (OuterVolumeSpecName: "utilities") pod "a3843149-4468-40d3-b941-50317b090419" (UID: "a3843149-4468-40d3-b941-50317b090419"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.519157 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3843149-4468-40d3-b941-50317b090419-kube-api-access-vvg2c" (OuterVolumeSpecName: "kube-api-access-vvg2c") pod "a3843149-4468-40d3-b941-50317b090419" (UID: "a3843149-4468-40d3-b941-50317b090419"). InnerVolumeSpecName "kube-api-access-vvg2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.594019 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a3843149-4468-40d3-b941-50317b090419" (UID: "a3843149-4468-40d3-b941-50317b090419"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.601127 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.601160 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a3843149-4468-40d3-b941-50317b090419-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.601171 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvg2c\" (UniqueName: \"kubernetes.io/projected/a3843149-4468-40d3-b941-50317b090419-kube-api-access-vvg2c\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.795742 4810 generic.go:334] "Generic (PLEG): container finished" podID="a3843149-4468-40d3-b941-50317b090419" containerID="275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba" exitCode=0 Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.795813 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p5qb" event={"ID":"a3843149-4468-40d3-b941-50317b090419","Type":"ContainerDied","Data":"275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba"} Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.795850 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4p5qb" event={"ID":"a3843149-4468-40d3-b941-50317b090419","Type":"ContainerDied","Data":"f535d05afcb0b3d2dc1a98ae1e38d4414fe0e5479ab3d5047558624a37b1e97b"} Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.795870 4810 scope.go:117] "RemoveContainer" containerID="275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.795925 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4p5qb" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.798036 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93" exitCode=0 Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.798081 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93"} Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.798100 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"c6269592bf80ac0d143f3317419b8c107ac59ae5aaaebd6c80994bfff7891dd2"} Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.815772 4810 scope.go:117] "RemoveContainer" containerID="bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.837910 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4p5qb"] Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.840590 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4p5qb"] Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.845002 4810 scope.go:117] "RemoveContainer" containerID="29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.861080 4810 scope.go:117] "RemoveContainer" containerID="275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba" Oct 09 00:10:21 crc kubenswrapper[4810]: E1009 00:10:21.861603 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba\": container with ID starting with 275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba not found: ID does not exist" containerID="275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.861648 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba"} err="failed to get container status \"275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba\": rpc error: code = NotFound desc = could not find container \"275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba\": container with ID starting with 275574081fcc4901d449154935dfe0e14cf0faefc9689a65175745f89db227ba not found: ID does not exist" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.861676 4810 scope.go:117] "RemoveContainer" containerID="bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3" Oct 09 00:10:21 crc kubenswrapper[4810]: E1009 00:10:21.863046 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3\": container with ID starting with bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3 not found: ID does not exist" 
containerID="bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.863080 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3"} err="failed to get container status \"bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3\": rpc error: code = NotFound desc = could not find container \"bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3\": container with ID starting with bb24a251148f2d31fcf2fc7f1cffa8a3b5dba44dfca074e1ff3d49f696bd16b3 not found: ID does not exist" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.863096 4810 scope.go:117] "RemoveContainer" containerID="29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae" Oct 09 00:10:21 crc kubenswrapper[4810]: E1009 00:10:21.864108 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae\": container with ID starting with 29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae not found: ID does not exist" containerID="29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae" Oct 09 00:10:21 crc kubenswrapper[4810]: I1009 00:10:21.864154 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae"} err="failed to get container status \"29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae\": rpc error: code = NotFound desc = could not find container \"29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae\": container with ID starting with 29f1ea14610a3f0b53f0849d272c5674b5b4d89fdf9276237f8e9f95a0d78cae not found: ID does not exist" Oct 09 00:10:23 crc kubenswrapper[4810]: I1009 00:10:23.259716 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3843149-4468-40d3-b941-50317b090419" path="/var/lib/kubelet/pods/a3843149-4468-40d3-b941-50317b090419/volumes" Oct 09 00:10:23 crc kubenswrapper[4810]: I1009 00:10:23.814584 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:10:24 crc kubenswrapper[4810]: I1009 00:10:24.400219 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:10:26 crc kubenswrapper[4810]: I1009 00:10:26.890088 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h6k7x"] Oct 09 00:10:26 crc kubenswrapper[4810]: I1009 00:10:26.890993 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h6k7x" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="registry-server" containerID="cri-o://c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997" gracePeriod=2 Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.237446 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.374761 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfqvl\" (UniqueName: \"kubernetes.io/projected/20d2c462-bd84-47ea-9b74-81aead40fcf1-kube-api-access-pfqvl\") pod \"20d2c462-bd84-47ea-9b74-81aead40fcf1\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.374841 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-utilities\") pod \"20d2c462-bd84-47ea-9b74-81aead40fcf1\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.374875 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-catalog-content\") pod \"20d2c462-bd84-47ea-9b74-81aead40fcf1\" (UID: \"20d2c462-bd84-47ea-9b74-81aead40fcf1\") " Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.375846 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-utilities" (OuterVolumeSpecName: "utilities") pod "20d2c462-bd84-47ea-9b74-81aead40fcf1" (UID: "20d2c462-bd84-47ea-9b74-81aead40fcf1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.390525 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20d2c462-bd84-47ea-9b74-81aead40fcf1-kube-api-access-pfqvl" (OuterVolumeSpecName: "kube-api-access-pfqvl") pod "20d2c462-bd84-47ea-9b74-81aead40fcf1" (UID: "20d2c462-bd84-47ea-9b74-81aead40fcf1"). InnerVolumeSpecName "kube-api-access-pfqvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.419809 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20d2c462-bd84-47ea-9b74-81aead40fcf1" (UID: "20d2c462-bd84-47ea-9b74-81aead40fcf1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.476803 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfqvl\" (UniqueName: \"kubernetes.io/projected/20d2c462-bd84-47ea-9b74-81aead40fcf1-kube-api-access-pfqvl\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.476858 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.476873 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20d2c462-bd84-47ea-9b74-81aead40fcf1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.671245 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lsgnf"] Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.828784 4810 generic.go:334] "Generic (PLEG): container finished" podID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerID="c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997" exitCode=0 Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.828868 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6k7x" event={"ID":"20d2c462-bd84-47ea-9b74-81aead40fcf1","Type":"ContainerDied","Data":"c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997"} Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.828928 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6k7x" event={"ID":"20d2c462-bd84-47ea-9b74-81aead40fcf1","Type":"ContainerDied","Data":"a27550e10873ecc20def4859052274d8b70bc1fdc91920a65b9632dc9b6d821f"} Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.828950 4810 scope.go:117] "RemoveContainer" containerID="c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.829324 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h6k7x" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.852657 4810 scope.go:117] "RemoveContainer" containerID="329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.867679 4810 scope.go:117] "RemoveContainer" containerID="de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.878354 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h6k7x"] Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.884311 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h6k7x"] Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.887110 4810 scope.go:117] "RemoveContainer" containerID="c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997" Oct 09 00:10:27 crc kubenswrapper[4810]: E1009 00:10:27.887543 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997\": container with ID starting with c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997 not found: ID does not exist" containerID="c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.887601 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997"} err="failed to get container status \"c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997\": rpc error: code = NotFound desc = could not find container \"c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997\": container with ID starting with c5f48282e1962aaaf3edc6acd4cc7f93e82cede2060cb1ca607de15b28e87997 not found: ID does not exist" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.887634 4810 scope.go:117] "RemoveContainer" containerID="329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e" Oct 09 00:10:27 crc kubenswrapper[4810]: E1009 00:10:27.888046 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e\": container with ID starting with 329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e not found: ID does not exist" containerID="329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.888075 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e"} err="failed to get container status \"329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e\": rpc error: code = NotFound desc = could not find container \"329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e\": container with ID starting with 329052bc16504f15401a8b0ffae7421b95c085293e4a2ae77e3f42469694926e not found: ID does not exist" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.888094 4810 scope.go:117] "RemoveContainer" containerID="de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230" Oct 09 00:10:27 crc kubenswrapper[4810]: E1009 00:10:27.888366 4810 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230\": container with ID starting with de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230 not found: ID does not exist" containerID="de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230" Oct 09 00:10:27 crc kubenswrapper[4810]: I1009 00:10:27.888413 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230"} err="failed to get container status \"de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230\": rpc error: code = NotFound desc = could not find container \"de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230\": container with ID starting with de8d5db4a9c9ba2b0f8d70b55e03663ad46fbe7a2d76ed80a60178706444b230 not found: ID does not exist" Oct 09 00:10:29 crc kubenswrapper[4810]: I1009 00:10:29.259181 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" path="/var/lib/kubelet/pods/20d2c462-bd84-47ea-9b74-81aead40fcf1/volumes" Oct 09 00:10:52 crc kubenswrapper[4810]: I1009 00:10:52.702280 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" podUID="a9b072a5-a708-4f29-9aae-f52e98802f1c" containerName="oauth-openshift" containerID="cri-o://d951b312cd5ed576de194a44d66c3d41d5cb41c2efb6e3254fd328e65ae54051" gracePeriod=15 Oct 09 00:10:52 crc kubenswrapper[4810]: I1009 00:10:52.994288 4810 generic.go:334] "Generic (PLEG): container finished" podID="a9b072a5-a708-4f29-9aae-f52e98802f1c" containerID="d951b312cd5ed576de194a44d66c3d41d5cb41c2efb6e3254fd328e65ae54051" exitCode=0 Oct 09 00:10:52 crc kubenswrapper[4810]: I1009 00:10:52.994405 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" event={"ID":"a9b072a5-a708-4f29-9aae-f52e98802f1c","Type":"ContainerDied","Data":"d951b312cd5ed576de194a44d66c3d41d5cb41c2efb6e3254fd328e65ae54051"} Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.105763 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.152376 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6447dfb5d9-b55f9"] Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.152813 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.152887 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.152917 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="496c69cb-4cc1-4a5b-9b5b-084707fc11e6" containerName="pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.152938 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="496c69cb-4cc1-4a5b-9b5b-084707fc11e6" containerName="pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.152957 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.152975 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.152993 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153011 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153029 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153045 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153072 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153088 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153125 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c91e079c-a647-4dbe-9c0b-4b3feff1b8dd" containerName="pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153142 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c91e079c-a647-4dbe-9c0b-4b3feff1b8dd" containerName="pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153169 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153186 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="extract-content" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153212 4810 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153230 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153255 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9b072a5-a708-4f29-9aae-f52e98802f1c" containerName="oauth-openshift" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153271 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9b072a5-a708-4f29-9aae-f52e98802f1c" containerName="oauth-openshift" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153297 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153315 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153339 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153356 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153382 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153400 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153422 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788a06a3-b34e-460e-a981-67130389de67" containerName="image-pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153438 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="788a06a3-b34e-460e-a981-67130389de67" containerName="image-pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153463 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153480 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: E1009 00:10:53.153507 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153525 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="extract-utilities" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153767 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="788a06a3-b34e-460e-a981-67130389de67" containerName="image-pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153811 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c20606c-ba53-4b7a-8eac-e663fdc2a550" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153867 4810 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="a9b072a5-a708-4f29-9aae-f52e98802f1c" containerName="oauth-openshift" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153892 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c91e079c-a647-4dbe-9c0b-4b3feff1b8dd" containerName="pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153914 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="42fb0613-71d0-4fd2-ab0b-b14f08c8a8a3" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153941 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3843149-4468-40d3-b941-50317b090419" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153967 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="496c69cb-4cc1-4a5b-9b5b-084707fc11e6" containerName="pruner" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.153993 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="20d2c462-bd84-47ea-9b74-81aead40fcf1" containerName="registry-server" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.154684 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.173413 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6447dfb5d9-b55f9"] Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244086 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-session\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244162 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-idp-0-file-data\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244220 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-policies\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244280 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-trusted-ca-bundle\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244339 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-router-certs\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244368 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" 
(UniqueName: \"kubernetes.io/host-path/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-dir\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244420 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-ocp-branding-template\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244470 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-service-ca\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244521 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244547 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-serving-cert\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244581 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-cliconfig\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244620 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz42n\" (UniqueName: \"kubernetes.io/projected/a9b072a5-a708-4f29-9aae-f52e98802f1c-kube-api-access-pz42n\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.244663 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-login\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.245293 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.245667 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.245717 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.245724 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.245925 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-error\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.245995 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-provider-selection\") pod \"a9b072a5-a708-4f29-9aae-f52e98802f1c\" (UID: \"a9b072a5-a708-4f29-9aae-f52e98802f1c\") " Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246234 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th68s\" (UniqueName: \"kubernetes.io/projected/4f922200-afe2-4234-8a27-1579335f6911-kube-api-access-th68s\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246333 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-service-ca\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246395 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: 
\"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246450 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-login\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246523 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246648 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246732 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-router-certs\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246782 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.246941 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4f922200-afe2-4234-8a27-1579335f6911-audit-dir\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247033 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-audit-policies\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247102 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-session\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247158 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247246 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-error\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247292 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247450 4810 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247484 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247507 4810 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a9b072a5-a708-4f29-9aae-f52e98802f1c-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247528 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.247548 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.250051 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.250677 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.251011 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.251131 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.251477 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.251935 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.252382 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.254201 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.255656 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9b072a5-a708-4f29-9aae-f52e98802f1c-kube-api-access-pz42n" (OuterVolumeSpecName: "kube-api-access-pz42n") pod "a9b072a5-a708-4f29-9aae-f52e98802f1c" (UID: "a9b072a5-a708-4f29-9aae-f52e98802f1c"). InnerVolumeSpecName "kube-api-access-pz42n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360018 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4f922200-afe2-4234-8a27-1579335f6911-audit-dir\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360138 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-audit-policies\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360176 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-session\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360211 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360201 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4f922200-afe2-4234-8a27-1579335f6911-audit-dir\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360287 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-error\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360323 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 
00:10:53.360380 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th68s\" (UniqueName: \"kubernetes.io/projected/4f922200-afe2-4234-8a27-1579335f6911-kube-api-access-th68s\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360424 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-service-ca\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360458 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-login\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360488 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360536 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360609 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360668 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-router-certs\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360715 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc 
kubenswrapper[4810]: I1009 00:10:53.360799 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360841 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360861 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360881 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz42n\" (UniqueName: \"kubernetes.io/projected/a9b072a5-a708-4f29-9aae-f52e98802f1c-kube-api-access-pz42n\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360899 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360915 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360933 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360950 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.360966 4810 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a9b072a5-a708-4f29-9aae-f52e98802f1c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.361521 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-audit-policies\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.362748 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-service-ca\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc 
kubenswrapper[4810]: I1009 00:10:53.363323 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.363455 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.367075 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.367533 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-router-certs\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.367614 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.367955 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-login\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.369354 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-session\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.370062 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 
00:10:53.370427 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.370587 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4f922200-afe2-4234-8a27-1579335f6911-v4-0-config-user-template-error\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.383951 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th68s\" (UniqueName: \"kubernetes.io/projected/4f922200-afe2-4234-8a27-1579335f6911-kube-api-access-th68s\") pod \"oauth-openshift-6447dfb5d9-b55f9\" (UID: \"4f922200-afe2-4234-8a27-1579335f6911\") " pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.473758 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:53 crc kubenswrapper[4810]: I1009 00:10:53.743894 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6447dfb5d9-b55f9"] Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.004072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" event={"ID":"4f922200-afe2-4234-8a27-1579335f6911","Type":"ContainerStarted","Data":"848293327c8dac342319654fe1d78e1731f48554e0f40bf476bfb1f65f5b1824"} Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.004144 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" event={"ID":"4f922200-afe2-4234-8a27-1579335f6911","Type":"ContainerStarted","Data":"9ad8c1d40e3a45aa1701a6da5b80f4f47f6a163804755913229afc8b0227533e"} Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.004802 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.007174 4810 patch_prober.go:28] interesting pod/oauth-openshift-6447dfb5d9-b55f9 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.56:6443/healthz\": dial tcp 10.217.0.56:6443: connect: connection refused" start-of-body= Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.007203 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" event={"ID":"a9b072a5-a708-4f29-9aae-f52e98802f1c","Type":"ContainerDied","Data":"fc69fc4248a40e9a53eb648bdadd2dbb6ce75540762fecad00145ce31d9aed2f"} Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.007234 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" podUID="4f922200-afe2-4234-8a27-1579335f6911" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.56:6443/healthz\": dial tcp 10.217.0.56:6443: connect: connection 
refused" Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.007270 4810 scope.go:117] "RemoveContainer" containerID="d951b312cd5ed576de194a44d66c3d41d5cb41c2efb6e3254fd328e65ae54051" Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.007276 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lsgnf" Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.043245 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" podStartSLOduration=27.043224376 podStartE2EDuration="27.043224376s" podCreationTimestamp="2025-10-09 00:10:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:10:54.026485891 +0000 UTC m=+251.552124602" watchObservedRunningTime="2025-10-09 00:10:54.043224376 +0000 UTC m=+251.568863067" Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.043413 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lsgnf"] Oct 09 00:10:54 crc kubenswrapper[4810]: I1009 00:10:54.046787 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lsgnf"] Oct 09 00:10:55 crc kubenswrapper[4810]: I1009 00:10:55.029913 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6447dfb5d9-b55f9" Oct 09 00:10:55 crc kubenswrapper[4810]: I1009 00:10:55.262136 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9b072a5-a708-4f29-9aae-f52e98802f1c" path="/var/lib/kubelet/pods/a9b072a5-a708-4f29-9aae-f52e98802f1c/volumes" Oct 09 00:11:04 crc kubenswrapper[4810]: I1009 00:11:04.991224 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jd56q"] Oct 09 00:11:04 crc kubenswrapper[4810]: I1009 00:11:04.991979 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jd56q" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="registry-server" containerID="cri-o://7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506" gracePeriod=30 Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.002036 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wtglp"] Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.002633 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wtglp" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="registry-server" containerID="cri-o://2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530" gracePeriod=30 Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.016148 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6hchn"] Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.016451 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" containerID="cri-o://98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e" gracePeriod=30 Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.026132 4810 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-44mqx"] Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.026397 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-44mqx" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="registry-server" containerID="cri-o://6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88" gracePeriod=30 Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.030312 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fxcb7"] Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.034788 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wtz45"] Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.034894 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.035241 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wtz45" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="registry-server" containerID="cri-o://67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55" gracePeriod=30 Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.039578 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fxcb7"] Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.128606 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.128664 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.128753 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrjv6\" (UniqueName: \"kubernetes.io/projected/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-kube-api-access-jrjv6\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.229331 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.229381 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.229427 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrjv6\" (UniqueName: \"kubernetes.io/projected/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-kube-api-access-jrjv6\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.232117 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.250513 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.260783 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrjv6\" (UniqueName: \"kubernetes.io/projected/0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc-kube-api-access-jrjv6\") pod \"marketplace-operator-79b997595-fxcb7\" (UID: \"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc\") " pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.359016 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.375301 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.406961 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.413798 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.437072 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.487343 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.534728 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btkrq\" (UniqueName: \"kubernetes.io/projected/37593966-3644-43b1-98f8-1b37ac87161d-kube-api-access-btkrq\") pod \"37593966-3644-43b1-98f8-1b37ac87161d\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.534859 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-utilities\") pod \"6191974f-e4e8-495e-a572-a264ecafce7d\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.534894 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbljd\" (UniqueName: \"kubernetes.io/projected/6191974f-e4e8-495e-a572-a264ecafce7d-kube-api-access-qbljd\") pod \"6191974f-e4e8-495e-a572-a264ecafce7d\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.534928 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jv5h\" (UniqueName: \"kubernetes.io/projected/82a04500-5006-4149-a4db-1982b49a1fcd-kube-api-access-7jv5h\") pod \"82a04500-5006-4149-a4db-1982b49a1fcd\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.534960 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-catalog-content\") pod \"37593966-3644-43b1-98f8-1b37ac87161d\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.534989 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-utilities\") pod \"37593966-3644-43b1-98f8-1b37ac87161d\" (UID: \"37593966-3644-43b1-98f8-1b37ac87161d\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.535015 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-operator-metrics\") pod \"82a04500-5006-4149-a4db-1982b49a1fcd\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.535058 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-trusted-ca\") pod \"82a04500-5006-4149-a4db-1982b49a1fcd\" (UID: \"82a04500-5006-4149-a4db-1982b49a1fcd\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.535085 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-utilities\") pod \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.535110 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-catalog-content\") pod \"6191974f-e4e8-495e-a572-a264ecafce7d\" (UID: \"6191974f-e4e8-495e-a572-a264ecafce7d\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.535133 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9rvk\" (UniqueName: \"kubernetes.io/projected/948a6dfa-fe30-4f84-a43f-dd5163884f0a-kube-api-access-r9rvk\") pod \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.535160 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-catalog-content\") pod \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\" (UID: \"948a6dfa-fe30-4f84-a43f-dd5163884f0a\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.536370 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-utilities" (OuterVolumeSpecName: "utilities") pod "948a6dfa-fe30-4f84-a43f-dd5163884f0a" (UID: "948a6dfa-fe30-4f84-a43f-dd5163884f0a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.536414 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-utilities" (OuterVolumeSpecName: "utilities") pod "6191974f-e4e8-495e-a572-a264ecafce7d" (UID: "6191974f-e4e8-495e-a572-a264ecafce7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.536906 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-utilities" (OuterVolumeSpecName: "utilities") pod "37593966-3644-43b1-98f8-1b37ac87161d" (UID: "37593966-3644-43b1-98f8-1b37ac87161d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.539798 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "82a04500-5006-4149-a4db-1982b49a1fcd" (UID: "82a04500-5006-4149-a4db-1982b49a1fcd"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.540720 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "82a04500-5006-4149-a4db-1982b49a1fcd" (UID: "82a04500-5006-4149-a4db-1982b49a1fcd"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.541029 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82a04500-5006-4149-a4db-1982b49a1fcd-kube-api-access-7jv5h" (OuterVolumeSpecName: "kube-api-access-7jv5h") pod "82a04500-5006-4149-a4db-1982b49a1fcd" (UID: "82a04500-5006-4149-a4db-1982b49a1fcd"). InnerVolumeSpecName "kube-api-access-7jv5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.541207 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948a6dfa-fe30-4f84-a43f-dd5163884f0a-kube-api-access-r9rvk" (OuterVolumeSpecName: "kube-api-access-r9rvk") pod "948a6dfa-fe30-4f84-a43f-dd5163884f0a" (UID: "948a6dfa-fe30-4f84-a43f-dd5163884f0a"). InnerVolumeSpecName "kube-api-access-r9rvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.543391 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6191974f-e4e8-495e-a572-a264ecafce7d-kube-api-access-qbljd" (OuterVolumeSpecName: "kube-api-access-qbljd") pod "6191974f-e4e8-495e-a572-a264ecafce7d" (UID: "6191974f-e4e8-495e-a572-a264ecafce7d"). InnerVolumeSpecName "kube-api-access-qbljd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.548385 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37593966-3644-43b1-98f8-1b37ac87161d-kube-api-access-btkrq" (OuterVolumeSpecName: "kube-api-access-btkrq") pod "37593966-3644-43b1-98f8-1b37ac87161d" (UID: "37593966-3644-43b1-98f8-1b37ac87161d"). InnerVolumeSpecName "kube-api-access-btkrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.566358 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "948a6dfa-fe30-4f84-a43f-dd5163884f0a" (UID: "948a6dfa-fe30-4f84-a43f-dd5163884f0a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.593087 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37593966-3644-43b1-98f8-1b37ac87161d" (UID: "37593966-3644-43b1-98f8-1b37ac87161d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.614694 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6191974f-e4e8-495e-a572-a264ecafce7d" (UID: "6191974f-e4e8-495e-a572-a264ecafce7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.636633 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-utilities\") pod \"357773dc-61ec-484f-908d-844f1105047b\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.636759 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlrpl\" (UniqueName: \"kubernetes.io/projected/357773dc-61ec-484f-908d-844f1105047b-kube-api-access-nlrpl\") pod \"357773dc-61ec-484f-908d-844f1105047b\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.636802 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-catalog-content\") pod \"357773dc-61ec-484f-908d-844f1105047b\" (UID: \"357773dc-61ec-484f-908d-844f1105047b\") " Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637046 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btkrq\" (UniqueName: \"kubernetes.io/projected/37593966-3644-43b1-98f8-1b37ac87161d-kube-api-access-btkrq\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637069 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637083 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbljd\" (UniqueName: \"kubernetes.io/projected/6191974f-e4e8-495e-a572-a264ecafce7d-kube-api-access-qbljd\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637095 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jv5h\" (UniqueName: \"kubernetes.io/projected/82a04500-5006-4149-a4db-1982b49a1fcd-kube-api-access-7jv5h\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637107 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637118 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37593966-3644-43b1-98f8-1b37ac87161d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637130 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637141 4810 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82a04500-5006-4149-a4db-1982b49a1fcd-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637154 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637166 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6191974f-e4e8-495e-a572-a264ecafce7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637177 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9rvk\" (UniqueName: \"kubernetes.io/projected/948a6dfa-fe30-4f84-a43f-dd5163884f0a-kube-api-access-r9rvk\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637188 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/948a6dfa-fe30-4f84-a43f-dd5163884f0a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.637618 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-utilities" (OuterVolumeSpecName: "utilities") pod "357773dc-61ec-484f-908d-844f1105047b" (UID: "357773dc-61ec-484f-908d-844f1105047b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.641071 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/357773dc-61ec-484f-908d-844f1105047b-kube-api-access-nlrpl" (OuterVolumeSpecName: "kube-api-access-nlrpl") pod "357773dc-61ec-484f-908d-844f1105047b" (UID: "357773dc-61ec-484f-908d-844f1105047b"). InnerVolumeSpecName "kube-api-access-nlrpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.716972 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "357773dc-61ec-484f-908d-844f1105047b" (UID: "357773dc-61ec-484f-908d-844f1105047b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.737943 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlrpl\" (UniqueName: \"kubernetes.io/projected/357773dc-61ec-484f-908d-844f1105047b-kube-api-access-nlrpl\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.737998 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.738008 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/357773dc-61ec-484f-908d-844f1105047b-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:11:05 crc kubenswrapper[4810]: I1009 00:11:05.796553 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fxcb7"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.089447 4810 generic.go:334] "Generic (PLEG): container finished" podID="6191974f-e4e8-495e-a572-a264ecafce7d" containerID="2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530" exitCode=0 Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.089514 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wtglp" event={"ID":"6191974f-e4e8-495e-a572-a264ecafce7d","Type":"ContainerDied","Data":"2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.089870 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wtglp" event={"ID":"6191974f-e4e8-495e-a572-a264ecafce7d","Type":"ContainerDied","Data":"bdfc9c8171bb4c866ead0486215024cc39bb798a84eed991d669a1d42b799830"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.089893 4810 scope.go:117] "RemoveContainer" containerID="2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.089527 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wtglp" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.093752 4810 generic.go:334] "Generic (PLEG): container finished" podID="357773dc-61ec-484f-908d-844f1105047b" containerID="67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55" exitCode=0 Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.093876 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wtz45" event={"ID":"357773dc-61ec-484f-908d-844f1105047b","Type":"ContainerDied","Data":"67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.093912 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wtz45" event={"ID":"357773dc-61ec-484f-908d-844f1105047b","Type":"ContainerDied","Data":"524961216df4bbde837ea18b3f6d940b0586707d71b6b3de20c1c31d0a0ec22e"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.093971 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wtz45" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.099688 4810 generic.go:334] "Generic (PLEG): container finished" podID="37593966-3644-43b1-98f8-1b37ac87161d" containerID="7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506" exitCode=0 Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.099728 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jd56q" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.099781 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd56q" event={"ID":"37593966-3644-43b1-98f8-1b37ac87161d","Type":"ContainerDied","Data":"7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.099873 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jd56q" event={"ID":"37593966-3644-43b1-98f8-1b37ac87161d","Type":"ContainerDied","Data":"300d57a343d23f2acaef7239a254210ce091aa985a6b5711e99a6b05227bb366"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.102241 4810 generic.go:334] "Generic (PLEG): container finished" podID="82a04500-5006-4149-a4db-1982b49a1fcd" containerID="98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e" exitCode=0 Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.102281 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" event={"ID":"82a04500-5006-4149-a4db-1982b49a1fcd","Type":"ContainerDied","Data":"98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.102296 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" event={"ID":"82a04500-5006-4149-a4db-1982b49a1fcd","Type":"ContainerDied","Data":"5ea1d33092c6c2f38c12257f3d3f672e6411716419858d524b5382d2866f0a58"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.102336 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6hchn" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.103840 4810 scope.go:117] "RemoveContainer" containerID="921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.108703 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" event={"ID":"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc","Type":"ContainerStarted","Data":"26f1a63907225e455373d1b7e873891339c5036e0ed5d65de26f1d9f5bac7c87"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.108727 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" event={"ID":"0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc","Type":"ContainerStarted","Data":"939cdf5e8364c5361b782d0ec20d50ed8e2afcf3d64c7b14a66c5abe8293a989"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.109013 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.112637 4810 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-fxcb7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.112718 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" podUID="0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.116102 4810 generic.go:334] "Generic (PLEG): container finished" podID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerID="6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88" exitCode=0 Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.116152 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44mqx" event={"ID":"948a6dfa-fe30-4f84-a43f-dd5163884f0a","Type":"ContainerDied","Data":"6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.116183 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-44mqx" event={"ID":"948a6dfa-fe30-4f84-a43f-dd5163884f0a","Type":"ContainerDied","Data":"304a86e7c35bd1fb15a31f9b1c7128c361c37f50832c8ec42b087deb2f4d6e89"} Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.116442 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-44mqx" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.123180 4810 scope.go:117] "RemoveContainer" containerID="4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.126815 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" podStartSLOduration=1.126797287 podStartE2EDuration="1.126797287s" podCreationTimestamp="2025-10-09 00:11:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:11:06.125080135 +0000 UTC m=+263.650718856" watchObservedRunningTime="2025-10-09 00:11:06.126797287 +0000 UTC m=+263.652435988" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.139503 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wtz45"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.156803 4810 scope.go:117] "RemoveContainer" containerID="2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.158806 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530\": container with ID starting with 2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530 not found: ID does not exist" containerID="2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.158860 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530"} err="failed to get container status \"2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530\": rpc error: code = NotFound desc = could not find container \"2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530\": container with ID starting with 2f2ca507e92f1ddf19749e4c7d4ec8e7c830bd368c4beb8573e84f508f1da530 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.158886 4810 scope.go:117] "RemoveContainer" containerID="921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.160053 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb\": container with ID starting with 921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb not found: ID does not exist" containerID="921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.160072 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb"} err="failed to get container status \"921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb\": rpc error: code = NotFound desc = could not find container \"921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb\": container with ID starting with 921807d22b59280a6f9475638e0c306839b62253ef67d5e69409c2751ae4cdeb not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.160122 
4810 scope.go:117] "RemoveContainer" containerID="4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.160452 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wtz45"] Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.160768 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10\": container with ID starting with 4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10 not found: ID does not exist" containerID="4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.160839 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10"} err="failed to get container status \"4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10\": rpc error: code = NotFound desc = could not find container \"4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10\": container with ID starting with 4143a50fd207f8594c2fc9430aaa8c438bf547ea1a3057a96e5dbfd0b7d6cc10 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.160878 4810 scope.go:117] "RemoveContainer" containerID="67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.166638 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jd56q"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.176674 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jd56q"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.179830 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wtglp"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.183199 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wtglp"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.185482 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6hchn"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.190853 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6hchn"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.199024 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-44mqx"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.199806 4810 scope.go:117] "RemoveContainer" containerID="023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.202678 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-44mqx"] Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.214895 4810 scope.go:117] "RemoveContainer" containerID="71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.234226 4810 scope.go:117] "RemoveContainer" containerID="67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.234747 4810 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55\": container with ID starting with 67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55 not found: ID does not exist" containerID="67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.234786 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55"} err="failed to get container status \"67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55\": rpc error: code = NotFound desc = could not find container \"67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55\": container with ID starting with 67378fef5d792d6752c5646e146a56feebe21b7f7a5a4d8f67ad0d7df2d29d55 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.234812 4810 scope.go:117] "RemoveContainer" containerID="023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.235143 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3\": container with ID starting with 023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3 not found: ID does not exist" containerID="023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.235171 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3"} err="failed to get container status \"023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3\": rpc error: code = NotFound desc = could not find container \"023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3\": container with ID starting with 023ce397fb6f4b359568f6872a317f4c17f3af41a9b309144e7362f0f2de6cb3 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.235191 4810 scope.go:117] "RemoveContainer" containerID="71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.235414 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468\": container with ID starting with 71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468 not found: ID does not exist" containerID="71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.235447 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468"} err="failed to get container status \"71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468\": rpc error: code = NotFound desc = could not find container \"71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468\": container with ID starting with 71375142c53ed5c40e5eae04c4336e5938aaa0d50e63b40495cd193a70a37468 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.235466 4810 scope.go:117] "RemoveContainer" 
containerID="7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.248445 4810 scope.go:117] "RemoveContainer" containerID="8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.261647 4810 scope.go:117] "RemoveContainer" containerID="7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.278066 4810 scope.go:117] "RemoveContainer" containerID="7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.280413 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506\": container with ID starting with 7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506 not found: ID does not exist" containerID="7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.280454 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506"} err="failed to get container status \"7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506\": rpc error: code = NotFound desc = could not find container \"7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506\": container with ID starting with 7922bd40904a67f89e4fe3bc788126d774e1ceeea72975ab266e623b9a930506 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.280485 4810 scope.go:117] "RemoveContainer" containerID="8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.280905 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f\": container with ID starting with 8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f not found: ID does not exist" containerID="8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.280935 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f"} err="failed to get container status \"8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f\": rpc error: code = NotFound desc = could not find container \"8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f\": container with ID starting with 8cc3dcb82f0593e7d761df65de4f474e053ae5323d258ca1b0a1e9f5776a2d4f not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.280950 4810 scope.go:117] "RemoveContainer" containerID="7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.281125 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e\": container with ID starting with 7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e not found: ID does not exist" containerID="7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e" 
Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.281144 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e"} err="failed to get container status \"7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e\": rpc error: code = NotFound desc = could not find container \"7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e\": container with ID starting with 7553737b3531ff405f2c84f811b838ddfbccff9a76505a39a9445a347ebd325e not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.281159 4810 scope.go:117] "RemoveContainer" containerID="98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.296807 4810 scope.go:117] "RemoveContainer" containerID="98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.297348 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e\": container with ID starting with 98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e not found: ID does not exist" containerID="98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.297377 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e"} err="failed to get container status \"98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e\": rpc error: code = NotFound desc = could not find container \"98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e\": container with ID starting with 98c3dd7b2650feb9c3dd3cf69e193ced55618a3959304826882e5b9e59a8093e not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.297398 4810 scope.go:117] "RemoveContainer" containerID="6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.313636 4810 scope.go:117] "RemoveContainer" containerID="5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.325918 4810 scope.go:117] "RemoveContainer" containerID="37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.337107 4810 scope.go:117] "RemoveContainer" containerID="6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.337486 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88\": container with ID starting with 6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88 not found: ID does not exist" containerID="6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.337513 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88"} err="failed to get container status \"6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88\": rpc error: code = NotFound 
desc = could not find container \"6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88\": container with ID starting with 6b404b7ff00534495a682e515d98b1bdce10b5496accf60283cbabf5660d8e88 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.337534 4810 scope.go:117] "RemoveContainer" containerID="5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.337835 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3\": container with ID starting with 5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3 not found: ID does not exist" containerID="5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.337957 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3"} err="failed to get container status \"5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3\": rpc error: code = NotFound desc = could not find container \"5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3\": container with ID starting with 5a5416a419a228c8be43501c04fcfe2818165c8ff98e2146e8c8a727402f33d3 not found: ID does not exist" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.338065 4810 scope.go:117] "RemoveContainer" containerID="37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2" Oct 09 00:11:06 crc kubenswrapper[4810]: E1009 00:11:06.338395 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2\": container with ID starting with 37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2 not found: ID does not exist" containerID="37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2" Oct 09 00:11:06 crc kubenswrapper[4810]: I1009 00:11:06.338424 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2"} err="failed to get container status \"37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2\": rpc error: code = NotFound desc = could not find container \"37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2\": container with ID starting with 37a91d6a55a8838e4917a8513a718fd7b8db1da151f18bc682f3cb70e4144ae2 not found: ID does not exist" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.127801 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fxcb7" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.205926 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lgslb"] Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206099 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206109 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206120 4810 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206126 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206133 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206140 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206150 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206155 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206162 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206168 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206178 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206183 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206192 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206197 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206206 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206212 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206219 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206225 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206235 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206241 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="extract-utilities" Oct 09 00:11:07 crc 
kubenswrapper[4810]: E1009 00:11:07.206249 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206254 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="extract-content" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206260 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206266 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" Oct 09 00:11:07 crc kubenswrapper[4810]: E1009 00:11:07.206274 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206280 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="extract-utilities" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206351 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206361 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="37593966-3644-43b1-98f8-1b37ac87161d" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206369 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" containerName="marketplace-operator" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206377 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="357773dc-61ec-484f-908d-844f1105047b" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.206385 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" containerName="registry-server" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.207121 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.209526 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.219517 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lgslb"] Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.260022 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="357773dc-61ec-484f-908d-844f1105047b" path="/var/lib/kubelet/pods/357773dc-61ec-484f-908d-844f1105047b/volumes" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.260756 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37593966-3644-43b1-98f8-1b37ac87161d" path="/var/lib/kubelet/pods/37593966-3644-43b1-98f8-1b37ac87161d/volumes" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.261637 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6191974f-e4e8-495e-a572-a264ecafce7d" path="/var/lib/kubelet/pods/6191974f-e4e8-495e-a572-a264ecafce7d/volumes" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.263137 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82a04500-5006-4149-a4db-1982b49a1fcd" path="/var/lib/kubelet/pods/82a04500-5006-4149-a4db-1982b49a1fcd/volumes" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.263620 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948a6dfa-fe30-4f84-a43f-dd5163884f0a" path="/var/lib/kubelet/pods/948a6dfa-fe30-4f84-a43f-dd5163884f0a/volumes" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.359969 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tjhl\" (UniqueName: \"kubernetes.io/projected/14c7ea46-7eeb-4c37-84d1-d3072de592c7-kube-api-access-7tjhl\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.360213 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c7ea46-7eeb-4c37-84d1-d3072de592c7-utilities\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.360468 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c7ea46-7eeb-4c37-84d1-d3072de592c7-catalog-content\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.410762 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t2q59"] Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.411870 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.413977 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.419924 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t2q59"] Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.461729 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c7ea46-7eeb-4c37-84d1-d3072de592c7-catalog-content\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.461995 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tjhl\" (UniqueName: \"kubernetes.io/projected/14c7ea46-7eeb-4c37-84d1-d3072de592c7-kube-api-access-7tjhl\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.462083 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c7ea46-7eeb-4c37-84d1-d3072de592c7-utilities\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.462208 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14c7ea46-7eeb-4c37-84d1-d3072de592c7-catalog-content\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.462548 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14c7ea46-7eeb-4c37-84d1-d3072de592c7-utilities\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.491724 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tjhl\" (UniqueName: \"kubernetes.io/projected/14c7ea46-7eeb-4c37-84d1-d3072de592c7-kube-api-access-7tjhl\") pod \"certified-operators-lgslb\" (UID: \"14c7ea46-7eeb-4c37-84d1-d3072de592c7\") " pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.525546 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.563369 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-catalog-content\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.563442 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-utilities\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.563521 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6js4x\" (UniqueName: \"kubernetes.io/projected/10919026-d45c-44ee-ba00-58329f902133-kube-api-access-6js4x\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.664796 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-utilities\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.665220 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6js4x\" (UniqueName: \"kubernetes.io/projected/10919026-d45c-44ee-ba00-58329f902133-kube-api-access-6js4x\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.665263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-catalog-content\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.665387 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-utilities\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.668275 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-catalog-content\") pod \"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.685351 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6js4x\" (UniqueName: \"kubernetes.io/projected/10919026-d45c-44ee-ba00-58329f902133-kube-api-access-6js4x\") pod 
\"redhat-marketplace-t2q59\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.741489 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lgslb"] Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.745709 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:07 crc kubenswrapper[4810]: W1009 00:11:07.767464 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14c7ea46_7eeb_4c37_84d1_d3072de592c7.slice/crio-20fd13d53bbbf0568c2a911c4a89ca54b1863d6a75327b4e79003f3a7b879e6f WatchSource:0}: Error finding container 20fd13d53bbbf0568c2a911c4a89ca54b1863d6a75327b4e79003f3a7b879e6f: Status 404 returned error can't find the container with id 20fd13d53bbbf0568c2a911c4a89ca54b1863d6a75327b4e79003f3a7b879e6f Oct 09 00:11:07 crc kubenswrapper[4810]: I1009 00:11:07.944545 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t2q59"] Oct 09 00:11:08 crc kubenswrapper[4810]: I1009 00:11:08.133425 4810 generic.go:334] "Generic (PLEG): container finished" podID="14c7ea46-7eeb-4c37-84d1-d3072de592c7" containerID="fbc000f1b7736c2d555b907829ca0e0b36019186441b52cd1ffe9bf965a5bf49" exitCode=0 Oct 09 00:11:08 crc kubenswrapper[4810]: I1009 00:11:08.133487 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgslb" event={"ID":"14c7ea46-7eeb-4c37-84d1-d3072de592c7","Type":"ContainerDied","Data":"fbc000f1b7736c2d555b907829ca0e0b36019186441b52cd1ffe9bf965a5bf49"} Oct 09 00:11:08 crc kubenswrapper[4810]: I1009 00:11:08.133511 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgslb" event={"ID":"14c7ea46-7eeb-4c37-84d1-d3072de592c7","Type":"ContainerStarted","Data":"20fd13d53bbbf0568c2a911c4a89ca54b1863d6a75327b4e79003f3a7b879e6f"} Oct 09 00:11:08 crc kubenswrapper[4810]: I1009 00:11:08.135596 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerStarted","Data":"3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c"} Oct 09 00:11:08 crc kubenswrapper[4810]: I1009 00:11:08.135623 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerStarted","Data":"bd093962a444ed26a403bbb0d951a514d1796f2165f0c9830742460224abfd97"} Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.141935 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgslb" event={"ID":"14c7ea46-7eeb-4c37-84d1-d3072de592c7","Type":"ContainerStarted","Data":"33592ee21253dbe3bbc8c3a6e8ae86dc8eeb44e10d5506fac56d1a69677d918f"} Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.146285 4810 generic.go:334] "Generic (PLEG): container finished" podID="10919026-d45c-44ee-ba00-58329f902133" containerID="3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c" exitCode=0 Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.146328 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" 
event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerDied","Data":"3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c"} Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.617694 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-plszl"] Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.619174 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.621166 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.630307 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-plszl"] Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.795270 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff3acf97-a548-45d1-9afe-0d228613d06b-utilities\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.795346 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff3acf97-a548-45d1-9afe-0d228613d06b-catalog-content\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.795472 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdpvw\" (UniqueName: \"kubernetes.io/projected/ff3acf97-a548-45d1-9afe-0d228613d06b-kube-api-access-vdpvw\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.811622 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p6bh5"] Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.816060 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.820993 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.824458 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p6bh5"] Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.897175 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff3acf97-a548-45d1-9afe-0d228613d06b-catalog-content\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.897568 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdpvw\" (UniqueName: \"kubernetes.io/projected/ff3acf97-a548-45d1-9afe-0d228613d06b-kube-api-access-vdpvw\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.897674 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff3acf97-a548-45d1-9afe-0d228613d06b-utilities\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.898103 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff3acf97-a548-45d1-9afe-0d228613d06b-utilities\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.898273 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff3acf97-a548-45d1-9afe-0d228613d06b-catalog-content\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.919563 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdpvw\" (UniqueName: \"kubernetes.io/projected/ff3acf97-a548-45d1-9afe-0d228613d06b-kube-api-access-vdpvw\") pod \"redhat-operators-plszl\" (UID: \"ff3acf97-a548-45d1-9afe-0d228613d06b\") " pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.937157 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.998420 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b8d67b8-aebd-42c7-98fe-5730ea71524e-catalog-content\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.998479 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzpv5\" (UniqueName: \"kubernetes.io/projected/7b8d67b8-aebd-42c7-98fe-5730ea71524e-kube-api-access-pzpv5\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:09 crc kubenswrapper[4810]: I1009 00:11:09.998512 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b8d67b8-aebd-42c7-98fe-5730ea71524e-utilities\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.099926 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b8d67b8-aebd-42c7-98fe-5730ea71524e-utilities\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.100028 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b8d67b8-aebd-42c7-98fe-5730ea71524e-catalog-content\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.100057 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzpv5\" (UniqueName: \"kubernetes.io/projected/7b8d67b8-aebd-42c7-98fe-5730ea71524e-kube-api-access-pzpv5\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.100429 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b8d67b8-aebd-42c7-98fe-5730ea71524e-utilities\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.100512 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b8d67b8-aebd-42c7-98fe-5730ea71524e-catalog-content\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.109700 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-plszl"] Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.127757 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pzpv5\" (UniqueName: \"kubernetes.io/projected/7b8d67b8-aebd-42c7-98fe-5730ea71524e-kube-api-access-pzpv5\") pod \"community-operators-p6bh5\" (UID: \"7b8d67b8-aebd-42c7-98fe-5730ea71524e\") " pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.128506 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.156460 4810 generic.go:334] "Generic (PLEG): container finished" podID="14c7ea46-7eeb-4c37-84d1-d3072de592c7" containerID="33592ee21253dbe3bbc8c3a6e8ae86dc8eeb44e10d5506fac56d1a69677d918f" exitCode=0 Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.156554 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgslb" event={"ID":"14c7ea46-7eeb-4c37-84d1-d3072de592c7","Type":"ContainerDied","Data":"33592ee21253dbe3bbc8c3a6e8ae86dc8eeb44e10d5506fac56d1a69677d918f"} Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.160375 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerStarted","Data":"d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce"} Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.163961 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plszl" event={"ID":"ff3acf97-a548-45d1-9afe-0d228613d06b","Type":"ContainerStarted","Data":"1b27b6cd2bc697426d21da0af15214ae3238156f73a81ebc772a737e800683c2"} Oct 09 00:11:10 crc kubenswrapper[4810]: I1009 00:11:10.315202 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p6bh5"] Oct 09 00:11:10 crc kubenswrapper[4810]: W1009 00:11:10.381838 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b8d67b8_aebd_42c7_98fe_5730ea71524e.slice/crio-cef07d8b855174720774bb3daad148ac9fe69deedc6cf64e305676bc4b2819a0 WatchSource:0}: Error finding container cef07d8b855174720774bb3daad148ac9fe69deedc6cf64e305676bc4b2819a0: Status 404 returned error can't find the container with id cef07d8b855174720774bb3daad148ac9fe69deedc6cf64e305676bc4b2819a0 Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.173224 4810 generic.go:334] "Generic (PLEG): container finished" podID="10919026-d45c-44ee-ba00-58329f902133" containerID="d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce" exitCode=0 Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.173301 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerDied","Data":"d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce"} Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.174096 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerStarted","Data":"dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0"} Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.175934 4810 generic.go:334] "Generic (PLEG): container finished" podID="ff3acf97-a548-45d1-9afe-0d228613d06b" 
containerID="6ed2d9808027d0c78077aa69d9fef1559eb61cb472ed2dddd67831d6df22a0d8" exitCode=0 Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.175987 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plszl" event={"ID":"ff3acf97-a548-45d1-9afe-0d228613d06b","Type":"ContainerDied","Data":"6ed2d9808027d0c78077aa69d9fef1559eb61cb472ed2dddd67831d6df22a0d8"} Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.178732 4810 generic.go:334] "Generic (PLEG): container finished" podID="7b8d67b8-aebd-42c7-98fe-5730ea71524e" containerID="09a2ea89df064bac862e6e3c35c8846acad96b5803004724135d47d5a90555c3" exitCode=0 Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.178845 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6bh5" event={"ID":"7b8d67b8-aebd-42c7-98fe-5730ea71524e","Type":"ContainerDied","Data":"09a2ea89df064bac862e6e3c35c8846acad96b5803004724135d47d5a90555c3"} Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.178881 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6bh5" event={"ID":"7b8d67b8-aebd-42c7-98fe-5730ea71524e","Type":"ContainerStarted","Data":"cef07d8b855174720774bb3daad148ac9fe69deedc6cf64e305676bc4b2819a0"} Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.184012 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lgslb" event={"ID":"14c7ea46-7eeb-4c37-84d1-d3072de592c7","Type":"ContainerStarted","Data":"7a941e0644453dd323fc649a2850e45ab4576bc44505e323d72095991bd396e9"} Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.201453 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t2q59" podStartSLOduration=2.783047269 podStartE2EDuration="4.20142396s" podCreationTimestamp="2025-10-09 00:11:07 +0000 UTC" firstStartedPulling="2025-10-09 00:11:09.147793953 +0000 UTC m=+266.673432654" lastFinishedPulling="2025-10-09 00:11:10.566170644 +0000 UTC m=+268.091809345" observedRunningTime="2025-10-09 00:11:11.194396468 +0000 UTC m=+268.720035190" watchObservedRunningTime="2025-10-09 00:11:11.20142396 +0000 UTC m=+268.727062741" Oct 09 00:11:11 crc kubenswrapper[4810]: I1009 00:11:11.257018 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lgslb" podStartSLOduration=1.607572767 podStartE2EDuration="4.256993972s" podCreationTimestamp="2025-10-09 00:11:07 +0000 UTC" firstStartedPulling="2025-10-09 00:11:08.135316966 +0000 UTC m=+265.660955667" lastFinishedPulling="2025-10-09 00:11:10.784738171 +0000 UTC m=+268.310376872" observedRunningTime="2025-10-09 00:11:11.254897629 +0000 UTC m=+268.780536360" watchObservedRunningTime="2025-10-09 00:11:11.256993972 +0000 UTC m=+268.782632693" Oct 09 00:11:13 crc kubenswrapper[4810]: I1009 00:11:13.208476 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6bh5" event={"ID":"7b8d67b8-aebd-42c7-98fe-5730ea71524e","Type":"ContainerStarted","Data":"7826e7df038dfb0d38e1e5689674cea05ae6706823f487d07b80802ad9467a1b"} Oct 09 00:11:14 crc kubenswrapper[4810]: I1009 00:11:14.214351 4810 generic.go:334] "Generic (PLEG): container finished" podID="ff3acf97-a548-45d1-9afe-0d228613d06b" containerID="88da5c36332b8a8ed34fd0c0f3902be935b742e6b6200c3c86818fb149a118c9" exitCode=0 Oct 09 00:11:14 crc kubenswrapper[4810]: I1009 00:11:14.214427 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plszl" event={"ID":"ff3acf97-a548-45d1-9afe-0d228613d06b","Type":"ContainerDied","Data":"88da5c36332b8a8ed34fd0c0f3902be935b742e6b6200c3c86818fb149a118c9"} Oct 09 00:11:14 crc kubenswrapper[4810]: I1009 00:11:14.216816 4810 generic.go:334] "Generic (PLEG): container finished" podID="7b8d67b8-aebd-42c7-98fe-5730ea71524e" containerID="7826e7df038dfb0d38e1e5689674cea05ae6706823f487d07b80802ad9467a1b" exitCode=0 Oct 09 00:11:14 crc kubenswrapper[4810]: I1009 00:11:14.216844 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6bh5" event={"ID":"7b8d67b8-aebd-42c7-98fe-5730ea71524e","Type":"ContainerDied","Data":"7826e7df038dfb0d38e1e5689674cea05ae6706823f487d07b80802ad9467a1b"} Oct 09 00:11:15 crc kubenswrapper[4810]: I1009 00:11:15.225725 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-plszl" event={"ID":"ff3acf97-a548-45d1-9afe-0d228613d06b","Type":"ContainerStarted","Data":"1c58c048ed0927b83c28b012c74e415ff123a41a5f2b62332fd695b5984b5c27"} Oct 09 00:11:15 crc kubenswrapper[4810]: I1009 00:11:15.228396 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p6bh5" event={"ID":"7b8d67b8-aebd-42c7-98fe-5730ea71524e","Type":"ContainerStarted","Data":"bc84cc3175b6eb377146017a42d83ed205e65476ad0654db544653aa914fa041"} Oct 09 00:11:15 crc kubenswrapper[4810]: I1009 00:11:15.246141 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-plszl" podStartSLOduration=2.754628516 podStartE2EDuration="6.24612028s" podCreationTimestamp="2025-10-09 00:11:09 +0000 UTC" firstStartedPulling="2025-10-09 00:11:11.178467639 +0000 UTC m=+268.704106340" lastFinishedPulling="2025-10-09 00:11:14.669959403 +0000 UTC m=+272.195598104" observedRunningTime="2025-10-09 00:11:15.244248573 +0000 UTC m=+272.769887294" watchObservedRunningTime="2025-10-09 00:11:15.24612028 +0000 UTC m=+272.771758981" Oct 09 00:11:15 crc kubenswrapper[4810]: I1009 00:11:15.263894 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p6bh5" podStartSLOduration=2.760629556 podStartE2EDuration="6.263875304s" podCreationTimestamp="2025-10-09 00:11:09 +0000 UTC" firstStartedPulling="2025-10-09 00:11:11.180669185 +0000 UTC m=+268.706307886" lastFinishedPulling="2025-10-09 00:11:14.683914933 +0000 UTC m=+272.209553634" observedRunningTime="2025-10-09 00:11:15.259153812 +0000 UTC m=+272.784792553" watchObservedRunningTime="2025-10-09 00:11:15.263875304 +0000 UTC m=+272.789514015" Oct 09 00:11:17 crc kubenswrapper[4810]: I1009 00:11:17.526444 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:17 crc kubenswrapper[4810]: I1009 00:11:17.526726 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:17 crc kubenswrapper[4810]: I1009 00:11:17.588369 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:17 crc kubenswrapper[4810]: I1009 00:11:17.746590 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:17 crc kubenswrapper[4810]: I1009 00:11:17.746940 4810 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:17 crc kubenswrapper[4810]: I1009 00:11:17.797215 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:18 crc kubenswrapper[4810]: I1009 00:11:18.284886 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:11:18 crc kubenswrapper[4810]: I1009 00:11:18.294386 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lgslb" Oct 09 00:11:19 crc kubenswrapper[4810]: I1009 00:11:19.937627 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:19 crc kubenswrapper[4810]: I1009 00:11:19.937678 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:19 crc kubenswrapper[4810]: I1009 00:11:19.989745 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:20 crc kubenswrapper[4810]: I1009 00:11:20.129794 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:20 crc kubenswrapper[4810]: I1009 00:11:20.130050 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:20 crc kubenswrapper[4810]: I1009 00:11:20.174064 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:11:20 crc kubenswrapper[4810]: I1009 00:11:20.297325 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-plszl" Oct 09 00:11:20 crc kubenswrapper[4810]: I1009 00:11:20.308898 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p6bh5" Oct 09 00:12:21 crc kubenswrapper[4810]: I1009 00:12:21.184346 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:12:21 crc kubenswrapper[4810]: I1009 00:12:21.185181 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:12:51 crc kubenswrapper[4810]: I1009 00:12:51.184462 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:12:51 crc kubenswrapper[4810]: I1009 00:12:51.185162 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:13:21 crc kubenswrapper[4810]: I1009 00:13:21.184450 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:13:21 crc kubenswrapper[4810]: I1009 00:13:21.185177 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:13:21 crc kubenswrapper[4810]: I1009 00:13:21.185261 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:13:21 crc kubenswrapper[4810]: I1009 00:13:21.186141 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c6269592bf80ac0d143f3317419b8c107ac59ae5aaaebd6c80994bfff7891dd2"} pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:13:21 crc kubenswrapper[4810]: I1009 00:13:21.186289 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://c6269592bf80ac0d143f3317419b8c107ac59ae5aaaebd6c80994bfff7891dd2" gracePeriod=600 Oct 09 00:13:22 crc kubenswrapper[4810]: I1009 00:13:22.086321 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="c6269592bf80ac0d143f3317419b8c107ac59ae5aaaebd6c80994bfff7891dd2" exitCode=0 Oct 09 00:13:22 crc kubenswrapper[4810]: I1009 00:13:22.086427 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"c6269592bf80ac0d143f3317419b8c107ac59ae5aaaebd6c80994bfff7891dd2"} Oct 09 00:13:22 crc kubenswrapper[4810]: I1009 00:13:22.087027 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"966edc9b311c0a1dc5d942427ebcd50c633f330f904d8cc5143b722007893d59"} Oct 09 00:13:22 crc kubenswrapper[4810]: I1009 00:13:22.087064 4810 scope.go:117] "RemoveContainer" containerID="3fd197d3e927fe4dfb5a440d1b0b171c7a8f46acdeb53b862fdf1e135caedf93" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.304062 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zw6tc"] Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.305844 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.317442 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zw6tc"] Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413116 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-registry-tls\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413162 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f26c3a0-d4b2-42f3-9f19-979275bef290-trusted-ca\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413190 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f26c3a0-d4b2-42f3-9f19-979275bef290-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413242 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnrqj\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-kube-api-access-vnrqj\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413289 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-bound-sa-token\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413340 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f26c3a0-d4b2-42f3-9f19-979275bef290-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413375 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f26c3a0-d4b2-42f3-9f19-979275bef290-registry-certificates\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.413472 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.441989 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514480 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-registry-tls\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514545 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f26c3a0-d4b2-42f3-9f19-979275bef290-trusted-ca\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514574 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f26c3a0-d4b2-42f3-9f19-979275bef290-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514624 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnrqj\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-kube-api-access-vnrqj\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514663 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-bound-sa-token\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514698 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f26c3a0-d4b2-42f3-9f19-979275bef290-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.514719 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f26c3a0-d4b2-42f3-9f19-979275bef290-registry-certificates\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.515484 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f26c3a0-d4b2-42f3-9f19-979275bef290-ca-trust-extracted\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.516700 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f26c3a0-d4b2-42f3-9f19-979275bef290-trusted-ca\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.516984 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f26c3a0-d4b2-42f3-9f19-979275bef290-registry-certificates\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.531333 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f26c3a0-d4b2-42f3-9f19-979275bef290-installation-pull-secrets\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.531837 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-registry-tls\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.535022 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-bound-sa-token\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.541576 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnrqj\" (UniqueName: \"kubernetes.io/projected/8f26c3a0-d4b2-42f3-9f19-979275bef290-kube-api-access-vnrqj\") pod \"image-registry-66df7c8f76-zw6tc\" (UID: \"8f26c3a0-d4b2-42f3-9f19-979275bef290\") " pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.625787 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:53 crc kubenswrapper[4810]: I1009 00:13:53.849497 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-zw6tc"] Oct 09 00:13:54 crc kubenswrapper[4810]: I1009 00:13:54.309579 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" event={"ID":"8f26c3a0-d4b2-42f3-9f19-979275bef290","Type":"ContainerStarted","Data":"6f41d4892b1c6baa0b9547cb8cf545f0e8a1daec68274742608772bf25484cc1"} Oct 09 00:13:54 crc kubenswrapper[4810]: I1009 00:13:54.309639 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" event={"ID":"8f26c3a0-d4b2-42f3-9f19-979275bef290","Type":"ContainerStarted","Data":"f57fa589b16a7842c74549b378a39503e712f132f41c130a010b6446206b50ec"} Oct 09 00:13:54 crc kubenswrapper[4810]: I1009 00:13:54.309757 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:13:54 crc kubenswrapper[4810]: I1009 00:13:54.343500 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" podStartSLOduration=1.3434717649999999 podStartE2EDuration="1.343471765s" podCreationTimestamp="2025-10-09 00:13:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:13:54.339261892 +0000 UTC m=+431.864900663" watchObservedRunningTime="2025-10-09 00:13:54.343471765 +0000 UTC m=+431.869110496" Oct 09 00:14:13 crc kubenswrapper[4810]: I1009 00:14:13.634092 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-zw6tc" Oct 09 00:14:13 crc kubenswrapper[4810]: I1009 00:14:13.708528 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vfwx7"] Oct 09 00:14:38 crc kubenswrapper[4810]: I1009 00:14:38.747198 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" podUID="a2bb4c7a-46ee-4294-ac9f-97a89488515d" containerName="registry" containerID="cri-o://c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160" gracePeriod=30 Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.131899 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280235 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-tls\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280349 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a2bb4c7a-46ee-4294-ac9f-97a89488515d-installation-pull-secrets\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280408 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-bound-sa-token\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280454 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a2bb4c7a-46ee-4294-ac9f-97a89488515d-ca-trust-extracted\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280502 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjn9g\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-kube-api-access-cjn9g\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280544 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-trusted-ca\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280645 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-certificates\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.280863 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\" (UID: \"a2bb4c7a-46ee-4294-ac9f-97a89488515d\") " Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.282202 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.284234 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.288007 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-kube-api-access-cjn9g" (OuterVolumeSpecName: "kube-api-access-cjn9g") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "kube-api-access-cjn9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.289154 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.289254 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.289878 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2bb4c7a-46ee-4294-ac9f-97a89488515d-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.300765 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.306341 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2bb4c7a-46ee-4294-ac9f-97a89488515d-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "a2bb4c7a-46ee-4294-ac9f-97a89488515d" (UID: "a2bb4c7a-46ee-4294-ac9f-97a89488515d"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383102 4810 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a2bb4c7a-46ee-4294-ac9f-97a89488515d-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383167 4810 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383198 4810 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a2bb4c7a-46ee-4294-ac9f-97a89488515d-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383218 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjn9g\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-kube-api-access-cjn9g\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383238 4810 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383257 4810 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.383277 4810 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a2bb4c7a-46ee-4294-ac9f-97a89488515d-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.625322 4810 generic.go:334] "Generic (PLEG): container finished" podID="a2bb4c7a-46ee-4294-ac9f-97a89488515d" containerID="c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160" exitCode=0 Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.625394 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" event={"ID":"a2bb4c7a-46ee-4294-ac9f-97a89488515d","Type":"ContainerDied","Data":"c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160"} Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.625449 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" event={"ID":"a2bb4c7a-46ee-4294-ac9f-97a89488515d","Type":"ContainerDied","Data":"4b35c8125defe2079b904c22bb1c9dd3ca3fefeebb9e874421da3a3294fc363c"} Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.625453 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vfwx7" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.625472 4810 scope.go:117] "RemoveContainer" containerID="c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.652155 4810 scope.go:117] "RemoveContainer" containerID="c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160" Oct 09 00:14:39 crc kubenswrapper[4810]: E1009 00:14:39.652588 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160\": container with ID starting with c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160 not found: ID does not exist" containerID="c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.652623 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160"} err="failed to get container status \"c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160\": rpc error: code = NotFound desc = could not find container \"c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160\": container with ID starting with c74d713147b356626e6e76e22266cc95add78d641f0f24fc8ce856e318d88160 not found: ID does not exist" Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.667133 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vfwx7"] Oct 09 00:14:39 crc kubenswrapper[4810]: I1009 00:14:39.671261 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vfwx7"] Oct 09 00:14:41 crc kubenswrapper[4810]: I1009 00:14:41.262321 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2bb4c7a-46ee-4294-ac9f-97a89488515d" path="/var/lib/kubelet/pods/a2bb4c7a-46ee-4294-ac9f-97a89488515d/volumes" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.159678 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8"] Oct 09 00:15:00 crc kubenswrapper[4810]: E1009 00:15:00.160536 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2bb4c7a-46ee-4294-ac9f-97a89488515d" containerName="registry" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.160560 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2bb4c7a-46ee-4294-ac9f-97a89488515d" containerName="registry" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.160711 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2bb4c7a-46ee-4294-ac9f-97a89488515d" containerName="registry" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.161335 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.166622 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.167747 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8"] Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.167972 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.274376 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d5979467-4bee-4623-93f1-63687f26af8f-secret-volume\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.274473 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d5979467-4bee-4623-93f1-63687f26af8f-config-volume\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.274498 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxfqw\" (UniqueName: \"kubernetes.io/projected/d5979467-4bee-4623-93f1-63687f26af8f-kube-api-access-cxfqw\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.375302 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d5979467-4bee-4623-93f1-63687f26af8f-config-volume\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.375425 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxfqw\" (UniqueName: \"kubernetes.io/projected/d5979467-4bee-4623-93f1-63687f26af8f-kube-api-access-cxfqw\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.375514 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d5979467-4bee-4623-93f1-63687f26af8f-secret-volume\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.376339 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d5979467-4bee-4623-93f1-63687f26af8f-config-volume\") pod 
\"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.381236 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d5979467-4bee-4623-93f1-63687f26af8f-secret-volume\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.395627 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxfqw\" (UniqueName: \"kubernetes.io/projected/d5979467-4bee-4623-93f1-63687f26af8f-kube-api-access-cxfqw\") pod \"collect-profiles-29332815-jb7r8\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.484366 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.655192 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8"] Oct 09 00:15:00 crc kubenswrapper[4810]: W1009 00:15:00.660711 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5979467_4bee_4623_93f1_63687f26af8f.slice/crio-838f7211dc4b5218e7d08f397e460ba7409ee5f60f57b08c4e2d576ee0df3a5e WatchSource:0}: Error finding container 838f7211dc4b5218e7d08f397e460ba7409ee5f60f57b08c4e2d576ee0df3a5e: Status 404 returned error can't find the container with id 838f7211dc4b5218e7d08f397e460ba7409ee5f60f57b08c4e2d576ee0df3a5e Oct 09 00:15:00 crc kubenswrapper[4810]: I1009 00:15:00.760106 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" event={"ID":"d5979467-4bee-4623-93f1-63687f26af8f","Type":"ContainerStarted","Data":"838f7211dc4b5218e7d08f397e460ba7409ee5f60f57b08c4e2d576ee0df3a5e"} Oct 09 00:15:01 crc kubenswrapper[4810]: I1009 00:15:01.772066 4810 generic.go:334] "Generic (PLEG): container finished" podID="d5979467-4bee-4623-93f1-63687f26af8f" containerID="774147eb6e492408a04dbaba1fd811933910e378b0d8a44a48b2075e9d009f5b" exitCode=0 Oct 09 00:15:01 crc kubenswrapper[4810]: I1009 00:15:01.772152 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" event={"ID":"d5979467-4bee-4623-93f1-63687f26af8f","Type":"ContainerDied","Data":"774147eb6e492408a04dbaba1fd811933910e378b0d8a44a48b2075e9d009f5b"} Oct 09 00:15:02 crc kubenswrapper[4810]: I1009 00:15:02.999532 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.110326 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d5979467-4bee-4623-93f1-63687f26af8f-secret-volume\") pod \"d5979467-4bee-4623-93f1-63687f26af8f\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.110776 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d5979467-4bee-4623-93f1-63687f26af8f-config-volume\") pod \"d5979467-4bee-4623-93f1-63687f26af8f\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.110887 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxfqw\" (UniqueName: \"kubernetes.io/projected/d5979467-4bee-4623-93f1-63687f26af8f-kube-api-access-cxfqw\") pod \"d5979467-4bee-4623-93f1-63687f26af8f\" (UID: \"d5979467-4bee-4623-93f1-63687f26af8f\") " Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.111285 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5979467-4bee-4623-93f1-63687f26af8f-config-volume" (OuterVolumeSpecName: "config-volume") pod "d5979467-4bee-4623-93f1-63687f26af8f" (UID: "d5979467-4bee-4623-93f1-63687f26af8f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.115867 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5979467-4bee-4623-93f1-63687f26af8f-kube-api-access-cxfqw" (OuterVolumeSpecName: "kube-api-access-cxfqw") pod "d5979467-4bee-4623-93f1-63687f26af8f" (UID: "d5979467-4bee-4623-93f1-63687f26af8f"). InnerVolumeSpecName "kube-api-access-cxfqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.116549 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5979467-4bee-4623-93f1-63687f26af8f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d5979467-4bee-4623-93f1-63687f26af8f" (UID: "d5979467-4bee-4623-93f1-63687f26af8f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.212414 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxfqw\" (UniqueName: \"kubernetes.io/projected/d5979467-4bee-4623-93f1-63687f26af8f-kube-api-access-cxfqw\") on node \"crc\" DevicePath \"\"" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.212463 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d5979467-4bee-4623-93f1-63687f26af8f-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.212477 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d5979467-4bee-4623-93f1-63687f26af8f-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.786367 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" event={"ID":"d5979467-4bee-4623-93f1-63687f26af8f","Type":"ContainerDied","Data":"838f7211dc4b5218e7d08f397e460ba7409ee5f60f57b08c4e2d576ee0df3a5e"} Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.786414 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="838f7211dc4b5218e7d08f397e460ba7409ee5f60f57b08c4e2d576ee0df3a5e" Oct 09 00:15:03 crc kubenswrapper[4810]: I1009 00:15:03.786391 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332815-jb7r8" Oct 09 00:15:21 crc kubenswrapper[4810]: I1009 00:15:21.184390 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:15:21 crc kubenswrapper[4810]: I1009 00:15:21.185114 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:15:51 crc kubenswrapper[4810]: I1009 00:15:51.184938 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:15:51 crc kubenswrapper[4810]: I1009 00:15:51.185535 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.559898 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-sd2lj"] Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561381 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" 
containerName="nbdb" containerID="cri-o://2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561463 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-acl-logging" containerID="cri-o://339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561404 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="sbdb" containerID="cri-o://aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561320 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-controller" containerID="cri-o://c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561542 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-node" containerID="cri-o://33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561415 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.561469 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="northd" containerID="cri-o://d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.611638 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" containerID="cri-o://1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" gracePeriod=30 Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.918505 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/3.log" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.923416 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovn-acl-logging/0.log" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.924634 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovn-controller/0.log" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.925123 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971534 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-75t6j"] Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971772 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971787 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971800 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971807 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971841 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="northd" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971849 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="northd" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971863 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5979467-4bee-4623-93f1-63687f26af8f" containerName="collect-profiles" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971871 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5979467-4bee-4623-93f1-63687f26af8f" containerName="collect-profiles" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971881 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="sbdb" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971888 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="sbdb" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971900 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-node" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971907 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-node" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971917 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kubecfg-setup" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971924 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kubecfg-setup" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971933 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971939 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971949 4810 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971958 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971968 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="nbdb" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971976 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="nbdb" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.971990 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.971998 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.972008 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-acl-logging" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972015 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-acl-logging" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972124 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-node" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972140 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972148 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="northd" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972159 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972168 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5979467-4bee-4623-93f1-63687f26af8f" containerName="collect-profiles" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972179 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972189 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972196 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="sbdb" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972205 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972214 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="nbdb" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972223 4810 
memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972231 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovn-acl-logging" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.972334 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972344 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972640 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: E1009 00:16:14.972752 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.972762 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerName="ovnkube-controller" Oct 09 00:16:14 crc kubenswrapper[4810]: I1009 00:16:14.974749 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.010981 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-var-lib-cni-networks-ovn-kubernetes\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011017 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-script-lib\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011032 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-netns\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011041 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011049 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovn-node-metrics-cert\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011089 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-ovn-kubernetes\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011107 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-netd\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011127 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-var-lib-openvswitch\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011142 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-log-socket\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011164 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-kubelet\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011186 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqsrd\" (UniqueName: \"kubernetes.io/projected/bfb7a412-4af9-4aa0-a3e8-d46dab040385-kube-api-access-tqsrd\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011212 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-ovn\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011239 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-node-log\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011270 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-openvswitch\") pod 
\"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011290 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-bin\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011366 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011367 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011389 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011400 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011425 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011423 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011443 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011455 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-log-socket" (OuterVolumeSpecName: "log-socket") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011366 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-node-log" (OuterVolumeSpecName: "node-log") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011560 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011540 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011727 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011871 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-systemd-units\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011905 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-systemd\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011933 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-config\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011973 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-env-overrides\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.011998 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-etc-openvswitch\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012021 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-slash\") pod \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\" (UID: \"bfb7a412-4af9-4aa0-a3e8-d46dab040385\") " Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012088 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012173 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-slash" (OuterVolumeSpecName: "host-slash") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012227 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-var-lib-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012262 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-ovn\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-env-overrides\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012298 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovnkube-config\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012374 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phk4n\" (UniqueName: \"kubernetes.io/projected/dfcce80a-4e02-4b4b-bc3b-c882904970a7-kube-api-access-phk4n\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012402 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovnkube-script-lib\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012427 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-node-log\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012447 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-log-socket\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012496 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod 
"bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012547 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012559 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-systemd-units\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012644 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012695 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-run-netns\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012732 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-run-ovn-kubernetes\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012834 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-etc-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012871 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-cni-netd\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.012924 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-systemd\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 
00:16:15.012982 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-cni-bin\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013128 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013171 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-kubelet\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013208 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovn-node-metrics-cert\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013239 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-slash\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013333 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013356 4810 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013371 4810 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013383 4810 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-slash\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013398 4810 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013412 4810 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013425 4810 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013438 4810 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013451 4810 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013464 4810 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013476 4810 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-log-socket\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013489 4810 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013503 4810 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013515 4810 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-node-log\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013527 4810 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013538 4810 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.013549 4810 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.016048 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.016148 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfb7a412-4af9-4aa0-a3e8-d46dab040385-kube-api-access-tqsrd" (OuterVolumeSpecName: "kube-api-access-tqsrd") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "kube-api-access-tqsrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.023051 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "bfb7a412-4af9-4aa0-a3e8-d46dab040385" (UID: "bfb7a412-4af9-4aa0-a3e8-d46dab040385"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115035 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-log-socket\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115095 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-systemd-units\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115143 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115150 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-log-socket\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115195 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-systemd-units\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115215 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115166 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-run-netns\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-run-ovn-kubernetes\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115291 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-etc-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115312 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-cni-netd\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115327 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-run-netns\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115361 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-etc-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115381 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-run-ovn-kubernetes\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115385 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-cni-netd\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115343 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-systemd\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115437 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-cni-bin\") pod 
\"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115458 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-systemd\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115486 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-kubelet\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115500 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115487 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-cni-bin\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115519 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovn-node-metrics-cert\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115566 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-slash\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115599 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-var-lib-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115619 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-ovn\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115622 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-kubelet\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 
crc kubenswrapper[4810]: I1009 00:16:15.115639 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-env-overrides\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115758 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovnkube-config\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115787 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phk4n\" (UniqueName: \"kubernetes.io/projected/dfcce80a-4e02-4b4b-bc3b-c882904970a7-kube-api-access-phk4n\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115807 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovnkube-script-lib\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115855 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-node-log\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115921 4810 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bfb7a412-4af9-4aa0-a3e8-d46dab040385-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115937 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqsrd\" (UniqueName: \"kubernetes.io/projected/bfb7a412-4af9-4aa0-a3e8-d46dab040385-kube-api-access-tqsrd\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115953 4810 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bfb7a412-4af9-4aa0-a3e8-d46dab040385-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.115990 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-node-log\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.116247 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-env-overrides\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.116295 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-host-slash\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.116321 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-var-lib-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.116346 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-ovn\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.116600 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/dfcce80a-4e02-4b4b-bc3b-c882904970a7-run-openvswitch\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.116675 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovnkube-config\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.117051 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovnkube-script-lib\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.118376 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/dfcce80a-4e02-4b4b-bc3b-c882904970a7-ovn-node-metrics-cert\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.132568 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phk4n\" (UniqueName: \"kubernetes.io/projected/dfcce80a-4e02-4b4b-bc3b-c882904970a7-kube-api-access-phk4n\") pod \"ovnkube-node-75t6j\" (UID: \"dfcce80a-4e02-4b4b-bc3b-c882904970a7\") " pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.237306 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/2.log" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.237849 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/1.log" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.237915 4810 generic.go:334] "Generic (PLEG): container finished" podID="8e9fffc1-16a6-4108-978b-6e85bdfd9c4f" 
containerID="2a35eb03a81dc3bf579210892f31a0deddf8aadd9d38af46a3613c7c5b5bec42" exitCode=2 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.237980 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerDied","Data":"2a35eb03a81dc3bf579210892f31a0deddf8aadd9d38af46a3613c7c5b5bec42"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.238015 4810 scope.go:117] "RemoveContainer" containerID="58052d32b55647157a375aa559ab58972b75789113c0e280eb05c02bdca2b136" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.238753 4810 scope.go:117] "RemoveContainer" containerID="2a35eb03a81dc3bf579210892f31a0deddf8aadd9d38af46a3613c7c5b5bec42" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.239144 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vrlxd_openshift-multus(8e9fffc1-16a6-4108-978b-6e85bdfd9c4f)\"" pod="openshift-multus/multus-vrlxd" podUID="8e9fffc1-16a6-4108-978b-6e85bdfd9c4f" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.242541 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovnkube-controller/3.log" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.247990 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovn-acl-logging/0.log" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.248884 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-sd2lj_bfb7a412-4af9-4aa0-a3e8-d46dab040385/ovn-controller/0.log" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249626 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" exitCode=0 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249648 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" exitCode=0 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249658 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" exitCode=0 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249667 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" exitCode=0 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249675 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" exitCode=0 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249664 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249739 4810 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249769 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249683 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" exitCode=0 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249814 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249887 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" exitCode=143 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249931 4810 generic.go:334] "Generic (PLEG): container finished" podID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" exitCode=143 Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249793 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.249993 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250035 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250062 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250089 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250108 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250124 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250138 4810 pod_container_deletor.go:114] "Failed to 
issue the request to remove container" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250156 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250171 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250186 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250203 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250217 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250240 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250265 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250283 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250298 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250317 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250332 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250347 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250362 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250376 4810 pod_container_deletor.go:114] "Failed to 
issue the request to remove container" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250391 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250405 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250427 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250452 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250471 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250486 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250499 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250514 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250530 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250544 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250558 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250573 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250587 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250609 4810 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sd2lj" event={"ID":"bfb7a412-4af9-4aa0-a3e8-d46dab040385","Type":"ContainerDied","Data":"2386c291419156c2556618dbce80db50a225cf2f8457f5540fbc4a86fa87eaff"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250634 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250652 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250667 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250723 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250738 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250753 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250769 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250785 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250800 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.250817 4810 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.284461 4810 scope.go:117] "RemoveContainer" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.290001 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.307107 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-sd2lj"] Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.312518 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-sd2lj"] Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.339672 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.370373 4810 scope.go:117] "RemoveContainer" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.387085 4810 scope.go:117] "RemoveContainer" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.404523 4810 scope.go:117] "RemoveContainer" containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.425700 4810 scope.go:117] "RemoveContainer" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.445495 4810 scope.go:117] "RemoveContainer" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.465753 4810 scope.go:117] "RemoveContainer" containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.491093 4810 scope.go:117] "RemoveContainer" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.581535 4810 scope.go:117] "RemoveContainer" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.595655 4810 scope.go:117] "RemoveContainer" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.596054 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": container with ID starting with 1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c not found: ID does not exist" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.596088 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} err="failed to get container status \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": rpc error: code = NotFound desc = could not find container \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": container with ID starting with 1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.596114 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.596464 4810 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": container with ID starting with 5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c not found: ID does not exist" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.596496 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} err="failed to get container status \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": rpc error: code = NotFound desc = could not find container \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": container with ID starting with 5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.596514 4810 scope.go:117] "RemoveContainer" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.596754 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": container with ID starting with aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919 not found: ID does not exist" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.596776 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} err="failed to get container status \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": rpc error: code = NotFound desc = could not find container \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": container with ID starting with aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.596789 4810 scope.go:117] "RemoveContainer" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.597008 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": container with ID starting with 2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb not found: ID does not exist" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597039 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} err="failed to get container status \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": rpc error: code = NotFound desc = could not find container \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": container with ID starting with 2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597057 4810 scope.go:117] "RemoveContainer" 
containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.597349 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": container with ID starting with d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b not found: ID does not exist" containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597370 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} err="failed to get container status \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": rpc error: code = NotFound desc = could not find container \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": container with ID starting with d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597382 4810 scope.go:117] "RemoveContainer" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.597585 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": container with ID starting with 384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4 not found: ID does not exist" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597603 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} err="failed to get container status \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": rpc error: code = NotFound desc = could not find container \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": container with ID starting with 384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597613 4810 scope.go:117] "RemoveContainer" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.597809 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": container with ID starting with 33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7 not found: ID does not exist" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597849 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} err="failed to get container status \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": rpc error: code = NotFound desc = could not find container \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": container with ID starting with 
33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.597861 4810 scope.go:117] "RemoveContainer" containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.598089 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": container with ID starting with 339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c not found: ID does not exist" containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598109 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} err="failed to get container status \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": rpc error: code = NotFound desc = could not find container \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": container with ID starting with 339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598119 4810 scope.go:117] "RemoveContainer" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.598349 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": container with ID starting with c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d not found: ID does not exist" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598364 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} err="failed to get container status \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": rpc error: code = NotFound desc = could not find container \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": container with ID starting with c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598377 4810 scope.go:117] "RemoveContainer" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" Oct 09 00:16:15 crc kubenswrapper[4810]: E1009 00:16:15.598537 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": container with ID starting with 3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f not found: ID does not exist" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598550 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} err="failed to get container status \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": rpc 
error: code = NotFound desc = could not find container \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": container with ID starting with 3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598562 4810 scope.go:117] "RemoveContainer" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598724 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} err="failed to get container status \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": rpc error: code = NotFound desc = could not find container \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": container with ID starting with 1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598736 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598882 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} err="failed to get container status \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": rpc error: code = NotFound desc = could not find container \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": container with ID starting with 5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.598894 4810 scope.go:117] "RemoveContainer" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599088 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} err="failed to get container status \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": rpc error: code = NotFound desc = could not find container \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": container with ID starting with aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599100 4810 scope.go:117] "RemoveContainer" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599258 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} err="failed to get container status \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": rpc error: code = NotFound desc = could not find container \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": container with ID starting with 2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599275 4810 scope.go:117] "RemoveContainer" containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" Oct 09 00:16:15 crc 
kubenswrapper[4810]: I1009 00:16:15.599433 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} err="failed to get container status \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": rpc error: code = NotFound desc = could not find container \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": container with ID starting with d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599448 4810 scope.go:117] "RemoveContainer" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599717 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} err="failed to get container status \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": rpc error: code = NotFound desc = could not find container \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": container with ID starting with 384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.599734 4810 scope.go:117] "RemoveContainer" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600133 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} err="failed to get container status \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": rpc error: code = NotFound desc = could not find container \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": container with ID starting with 33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600151 4810 scope.go:117] "RemoveContainer" containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600342 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} err="failed to get container status \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": rpc error: code = NotFound desc = could not find container \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": container with ID starting with 339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600359 4810 scope.go:117] "RemoveContainer" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600564 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} err="failed to get container status \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": rpc error: code = NotFound desc = could not find container \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": container with ID 
starting with c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600581 4810 scope.go:117] "RemoveContainer" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600746 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} err="failed to get container status \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": rpc error: code = NotFound desc = could not find container \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": container with ID starting with 3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.600758 4810 scope.go:117] "RemoveContainer" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601002 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} err="failed to get container status \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": rpc error: code = NotFound desc = could not find container \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": container with ID starting with 1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601018 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601181 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} err="failed to get container status \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": rpc error: code = NotFound desc = could not find container \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": container with ID starting with 5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601198 4810 scope.go:117] "RemoveContainer" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601613 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} err="failed to get container status \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": rpc error: code = NotFound desc = could not find container \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": container with ID starting with aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601638 4810 scope.go:117] "RemoveContainer" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601879 4810 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} err="failed to get container status \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": rpc error: code = NotFound desc = could not find container \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": container with ID starting with 2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.601896 4810 scope.go:117] "RemoveContainer" containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602088 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} err="failed to get container status \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": rpc error: code = NotFound desc = could not find container \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": container with ID starting with d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602104 4810 scope.go:117] "RemoveContainer" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602326 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} err="failed to get container status \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": rpc error: code = NotFound desc = could not find container \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": container with ID starting with 384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602349 4810 scope.go:117] "RemoveContainer" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602701 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} err="failed to get container status \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": rpc error: code = NotFound desc = could not find container \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": container with ID starting with 33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602723 4810 scope.go:117] "RemoveContainer" containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.602986 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} err="failed to get container status \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": rpc error: code = NotFound desc = could not find container \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": container with ID starting with 339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c not found: ID does not exist" Oct 
09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603009 4810 scope.go:117] "RemoveContainer" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603209 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} err="failed to get container status \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": rpc error: code = NotFound desc = could not find container \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": container with ID starting with c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603231 4810 scope.go:117] "RemoveContainer" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603468 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} err="failed to get container status \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": rpc error: code = NotFound desc = could not find container \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": container with ID starting with 3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603484 4810 scope.go:117] "RemoveContainer" containerID="1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603662 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c"} err="failed to get container status \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": rpc error: code = NotFound desc = could not find container \"1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c\": container with ID starting with 1f77b5b50f91a1b2aaf78fbad7b18bf045f8126292f4bdceb977d6a19ca70e8c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603687 4810 scope.go:117] "RemoveContainer" containerID="5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603923 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c"} err="failed to get container status \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": rpc error: code = NotFound desc = could not find container \"5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c\": container with ID starting with 5e4e561b207795d34689436db7148e6910b4ca3cd753c906326a0daa2e2ad80c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.603958 4810 scope.go:117] "RemoveContainer" containerID="aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.604217 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919"} err="failed to get container status 
\"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": rpc error: code = NotFound desc = could not find container \"aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919\": container with ID starting with aa6a8c50220d158b8e3209b1a9c4ffd5145787bf44db2b68e0b0035ae3ebb919 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.604244 4810 scope.go:117] "RemoveContainer" containerID="2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.604527 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb"} err="failed to get container status \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": rpc error: code = NotFound desc = could not find container \"2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb\": container with ID starting with 2c3bc44e8648c6ac1c6dc31d009a2af2f126a9677fc5aa9c624e3719f143debb not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.604557 4810 scope.go:117] "RemoveContainer" containerID="d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605007 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b"} err="failed to get container status \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": rpc error: code = NotFound desc = could not find container \"d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b\": container with ID starting with d9a80c0b5ffbf673c5c64457c412194aec508ade5c559c8e4a782b428fdbc47b not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605030 4810 scope.go:117] "RemoveContainer" containerID="384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605250 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4"} err="failed to get container status \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": rpc error: code = NotFound desc = could not find container \"384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4\": container with ID starting with 384a16d6bde2af81746a4fc2d8c71e6a4f39ed5670a1947d3fe53771f25d85c4 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605271 4810 scope.go:117] "RemoveContainer" containerID="33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605439 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7"} err="failed to get container status \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": rpc error: code = NotFound desc = could not find container \"33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7\": container with ID starting with 33353d4c47725b436cbc17cde92bfd28ee42732e1a7066aeee84ff9fd88a8ff7 not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605458 4810 scope.go:117] "RemoveContainer" 
containerID="339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605851 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c"} err="failed to get container status \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": rpc error: code = NotFound desc = could not find container \"339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c\": container with ID starting with 339855c9c2a3000788ffeb8b39a53d1e24320c66406812f2f555d0ca4f7a570c not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.605874 4810 scope.go:117] "RemoveContainer" containerID="c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.606063 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d"} err="failed to get container status \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": rpc error: code = NotFound desc = could not find container \"c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d\": container with ID starting with c978494d1b41d4a54727b4bfc9a012e731e1005e993d18ef813e7bf8bbc8b97d not found: ID does not exist" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.606085 4810 scope.go:117] "RemoveContainer" containerID="3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f" Oct 09 00:16:15 crc kubenswrapper[4810]: I1009 00:16:15.606308 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f"} err="failed to get container status \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": rpc error: code = NotFound desc = could not find container \"3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f\": container with ID starting with 3942f306aca1379cfae2738c568f91e2d95bdffe7c7451ebd7e71250f38ccd6f not found: ID does not exist" Oct 09 00:16:16 crc kubenswrapper[4810]: I1009 00:16:16.258618 4810 generic.go:334] "Generic (PLEG): container finished" podID="dfcce80a-4e02-4b4b-bc3b-c882904970a7" containerID="b50cab238a1e4c741b5d22d43b2ee526994c5669fd476cd54eedea4212224954" exitCode=0 Oct 09 00:16:16 crc kubenswrapper[4810]: I1009 00:16:16.258734 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerDied","Data":"b50cab238a1e4c741b5d22d43b2ee526994c5669fd476cd54eedea4212224954"} Oct 09 00:16:16 crc kubenswrapper[4810]: I1009 00:16:16.259137 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"67c4ac400ebbadd30c3d599b7965ebd5a72cc2554050112bcd068f6717dbafa6"} Oct 09 00:16:16 crc kubenswrapper[4810]: I1009 00:16:16.274606 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/2.log" Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.266093 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfb7a412-4af9-4aa0-a3e8-d46dab040385" 
path="/var/lib/kubelet/pods/bfb7a412-4af9-4aa0-a3e8-d46dab040385/volumes" Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.293577 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"601db6ee30a999b258dc3f5d4e21c75156c2a2f2263fcf87bbbb476465ff9d2d"} Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.293636 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"c112325bab2b9af1e9bed8fec95afd55259c7f85540d2af180ca2a70fb4097d6"} Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.293663 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"0fb9abfc25528ecc39da1582ffbc5c7a9faf3c65a3b9d9f2fd51eb52e95a1e67"} Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.293681 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"24266ceb52ae455d577519dfb9b2fdf09cb5ed70731300ccde7defbfc5b54933"} Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.293701 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"994e209dd59404ddcbf07b7b0fbe43cb17a8b41abe96ffaf6c7a651dc09dff78"} Oct 09 00:16:17 crc kubenswrapper[4810]: I1009 00:16:17.293718 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"92b48d8c8329455ff6cf6bab9b332210624996b0271feb5ec41afa95253522ff"} Oct 09 00:16:19 crc kubenswrapper[4810]: I1009 00:16:19.320760 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"1bbbe585f111089e015ca3053b7d82f3814fc86926023beed85f9bde3cdb81a8"} Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.184418 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.184757 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.184824 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.185751 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"966edc9b311c0a1dc5d942427ebcd50c633f330f904d8cc5143b722007893d59"} 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.185889 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://966edc9b311c0a1dc5d942427ebcd50c633f330f904d8cc5143b722007893d59" gracePeriod=600 Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.335343 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="966edc9b311c0a1dc5d942427ebcd50c633f330f904d8cc5143b722007893d59" exitCode=0 Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.335408 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"966edc9b311c0a1dc5d942427ebcd50c633f330f904d8cc5143b722007893d59"} Oct 09 00:16:21 crc kubenswrapper[4810]: I1009 00:16:21.335461 4810 scope.go:117] "RemoveContainer" containerID="c6269592bf80ac0d143f3317419b8c107ac59ae5aaaebd6c80994bfff7891dd2" Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.351563 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"9655c39bfc737814bdf380a44432c260858cbf7ed04ea57c70188208f88269af"} Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.357690 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" event={"ID":"dfcce80a-4e02-4b4b-bc3b-c882904970a7","Type":"ContainerStarted","Data":"fef02b0749e87bdc003de0bb56403785070bc08c4f1d364359ed7119bf108a27"} Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.359234 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.359264 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.359449 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.394163 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.397181 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:22 crc kubenswrapper[4810]: I1009 00:16:22.414449 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" podStartSLOduration=8.414430891 podStartE2EDuration="8.414430891s" podCreationTimestamp="2025-10-09 00:16:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:16:22.410493007 +0000 UTC m=+579.936131768" watchObservedRunningTime="2025-10-09 00:16:22.414430891 +0000 UTC m=+579.940069592" Oct 09 00:16:30 crc kubenswrapper[4810]: I1009 00:16:30.253565 4810 
scope.go:117] "RemoveContainer" containerID="2a35eb03a81dc3bf579210892f31a0deddf8aadd9d38af46a3613c7c5b5bec42" Oct 09 00:16:30 crc kubenswrapper[4810]: E1009 00:16:30.254312 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-vrlxd_openshift-multus(8e9fffc1-16a6-4108-978b-6e85bdfd9c4f)\"" pod="openshift-multus/multus-vrlxd" podUID="8e9fffc1-16a6-4108-978b-6e85bdfd9c4f" Oct 09 00:16:45 crc kubenswrapper[4810]: I1009 00:16:45.254325 4810 scope.go:117] "RemoveContainer" containerID="2a35eb03a81dc3bf579210892f31a0deddf8aadd9d38af46a3613c7c5b5bec42" Oct 09 00:16:45 crc kubenswrapper[4810]: I1009 00:16:45.334713 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-75t6j" Oct 09 00:16:45 crc kubenswrapper[4810]: I1009 00:16:45.514163 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-vrlxd_8e9fffc1-16a6-4108-978b-6e85bdfd9c4f/kube-multus/2.log" Oct 09 00:16:45 crc kubenswrapper[4810]: I1009 00:16:45.514219 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-vrlxd" event={"ID":"8e9fffc1-16a6-4108-978b-6e85bdfd9c4f","Type":"ContainerStarted","Data":"c6101e0cb43d874c3a3d87597bfa166636aa6ee3d093198761b78388caa043ef"} Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.148405 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t2q59"] Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.149169 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t2q59" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="registry-server" containerID="cri-o://dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0" gracePeriod=30 Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.563420 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.726758 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6js4x\" (UniqueName: \"kubernetes.io/projected/10919026-d45c-44ee-ba00-58329f902133-kube-api-access-6js4x\") pod \"10919026-d45c-44ee-ba00-58329f902133\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.726947 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-utilities\") pod \"10919026-d45c-44ee-ba00-58329f902133\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.727021 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-catalog-content\") pod \"10919026-d45c-44ee-ba00-58329f902133\" (UID: \"10919026-d45c-44ee-ba00-58329f902133\") " Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.728804 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-utilities" (OuterVolumeSpecName: "utilities") pod "10919026-d45c-44ee-ba00-58329f902133" (UID: "10919026-d45c-44ee-ba00-58329f902133"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.737071 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10919026-d45c-44ee-ba00-58329f902133-kube-api-access-6js4x" (OuterVolumeSpecName: "kube-api-access-6js4x") pod "10919026-d45c-44ee-ba00-58329f902133" (UID: "10919026-d45c-44ee-ba00-58329f902133"). InnerVolumeSpecName "kube-api-access-6js4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.757012 4810 generic.go:334] "Generic (PLEG): container finished" podID="10919026-d45c-44ee-ba00-58329f902133" containerID="dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0" exitCode=0 Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.757093 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerDied","Data":"dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0"} Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.757109 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t2q59" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.757142 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t2q59" event={"ID":"10919026-d45c-44ee-ba00-58329f902133","Type":"ContainerDied","Data":"bd093962a444ed26a403bbb0d951a514d1796f2165f0c9830742460224abfd97"} Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.757184 4810 scope.go:117] "RemoveContainer" containerID="dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.757892 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10919026-d45c-44ee-ba00-58329f902133" (UID: "10919026-d45c-44ee-ba00-58329f902133"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.779701 4810 scope.go:117] "RemoveContainer" containerID="d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.791684 4810 scope.go:117] "RemoveContainer" containerID="3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.814730 4810 scope.go:117] "RemoveContainer" containerID="dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0" Oct 09 00:17:20 crc kubenswrapper[4810]: E1009 00:17:20.815170 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0\": container with ID starting with dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0 not found: ID does not exist" containerID="dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.815237 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0"} err="failed to get container status \"dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0\": rpc error: code = NotFound desc = could not find container \"dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0\": container with ID starting with dd4505dcd0a018a5ca30a71d1029beca2c2d6c123cd185baf71efc83bf6a32d0 not found: ID does not exist" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.815276 4810 scope.go:117] "RemoveContainer" containerID="d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce" Oct 09 00:17:20 crc kubenswrapper[4810]: E1009 00:17:20.815626 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce\": container with ID starting with d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce not found: ID does not exist" containerID="d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.815658 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce"} err="failed to get container status \"d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce\": rpc error: code = NotFound desc = could not find container \"d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce\": container with ID starting with d99144664a4be809fe01072a61e6926cc230fe7bd50d3ff55df7264dd824c5ce not found: ID does not exist" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.815678 4810 scope.go:117] "RemoveContainer" containerID="3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c" Oct 09 00:17:20 crc kubenswrapper[4810]: E1009 00:17:20.816039 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c\": container with ID starting with 3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c not found: ID does not exist" containerID="3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c" Oct 09 00:17:20 crc 
kubenswrapper[4810]: I1009 00:17:20.816082 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c"} err="failed to get container status \"3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c\": rpc error: code = NotFound desc = could not find container \"3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c\": container with ID starting with 3781506b67d348bd0129696982408e13b674980485f48c33cbb674a500f3a56c not found: ID does not exist" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.828343 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6js4x\" (UniqueName: \"kubernetes.io/projected/10919026-d45c-44ee-ba00-58329f902133-kube-api-access-6js4x\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.828375 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:20 crc kubenswrapper[4810]: I1009 00:17:20.828385 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10919026-d45c-44ee-ba00-58329f902133-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:21 crc kubenswrapper[4810]: I1009 00:17:21.088734 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t2q59"] Oct 09 00:17:21 crc kubenswrapper[4810]: I1009 00:17:21.098698 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t2q59"] Oct 09 00:17:21 crc kubenswrapper[4810]: I1009 00:17:21.261776 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10919026-d45c-44ee-ba00-58329f902133" path="/var/lib/kubelet/pods/10919026-d45c-44ee-ba00-58329f902133/volumes" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.671951 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp"] Oct 09 00:17:23 crc kubenswrapper[4810]: E1009 00:17:23.672610 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="extract-content" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.672654 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="extract-content" Oct 09 00:17:23 crc kubenswrapper[4810]: E1009 00:17:23.672680 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="registry-server" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.672691 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="registry-server" Oct 09 00:17:23 crc kubenswrapper[4810]: E1009 00:17:23.672707 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="extract-utilities" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.672742 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="10919026-d45c-44ee-ba00-58329f902133" containerName="extract-utilities" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.672991 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="10919026-d45c-44ee-ba00-58329f902133" 
containerName="registry-server" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.674351 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.677072 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.679331 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp"] Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.761908 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.762076 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.762145 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhznn\" (UniqueName: \"kubernetes.io/projected/6ea76cac-18f3-4d2b-baec-628d633a9f15-kube-api-access-bhznn\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.863922 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhznn\" (UniqueName: \"kubernetes.io/projected/6ea76cac-18f3-4d2b-baec-628d633a9f15-kube-api-access-bhznn\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.864071 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.864184 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 
00:17:23.864999 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.865041 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.892510 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhznn\" (UniqueName: \"kubernetes.io/projected/6ea76cac-18f3-4d2b-baec-628d633a9f15-kube-api-access-bhznn\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:23 crc kubenswrapper[4810]: I1009 00:17:23.994919 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:24 crc kubenswrapper[4810]: I1009 00:17:24.175156 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp"] Oct 09 00:17:24 crc kubenswrapper[4810]: I1009 00:17:24.784497 4810 generic.go:334] "Generic (PLEG): container finished" podID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerID="9b885b739d0d652a952de6eb70f1277eda5b8784766d685ade51a1bcf8cb75c6" exitCode=0 Oct 09 00:17:24 crc kubenswrapper[4810]: I1009 00:17:24.784591 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" event={"ID":"6ea76cac-18f3-4d2b-baec-628d633a9f15","Type":"ContainerDied","Data":"9b885b739d0d652a952de6eb70f1277eda5b8784766d685ade51a1bcf8cb75c6"} Oct 09 00:17:24 crc kubenswrapper[4810]: I1009 00:17:24.785025 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" event={"ID":"6ea76cac-18f3-4d2b-baec-628d633a9f15","Type":"ContainerStarted","Data":"85ed1219542d074be0a478fac12fb2c2926bf9d2566c2b59e55e33acda826efb"} Oct 09 00:17:24 crc kubenswrapper[4810]: I1009 00:17:24.786797 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 00:17:26 crc kubenswrapper[4810]: I1009 00:17:26.800442 4810 generic.go:334] "Generic (PLEG): container finished" podID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerID="7aea075225d4c8cdb4a8bd4ad3be8d9d6aac1b3382ac7e1fe9abfb63c6bc494d" exitCode=0 Oct 09 00:17:26 crc kubenswrapper[4810]: I1009 00:17:26.800497 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" event={"ID":"6ea76cac-18f3-4d2b-baec-628d633a9f15","Type":"ContainerDied","Data":"7aea075225d4c8cdb4a8bd4ad3be8d9d6aac1b3382ac7e1fe9abfb63c6bc494d"} Oct 09 00:17:27 crc 
kubenswrapper[4810]: I1009 00:17:27.810619 4810 generic.go:334] "Generic (PLEG): container finished" podID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerID="ec3f2be0e03a28a74ad42a79252b942c72c4b0c2a45a3be2f4997da932c74040" exitCode=0 Oct 09 00:17:27 crc kubenswrapper[4810]: I1009 00:17:27.810767 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" event={"ID":"6ea76cac-18f3-4d2b-baec-628d633a9f15","Type":"ContainerDied","Data":"ec3f2be0e03a28a74ad42a79252b942c72c4b0c2a45a3be2f4997da932c74040"} Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.162699 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.356893 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhznn\" (UniqueName: \"kubernetes.io/projected/6ea76cac-18f3-4d2b-baec-628d633a9f15-kube-api-access-bhznn\") pod \"6ea76cac-18f3-4d2b-baec-628d633a9f15\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.357025 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-bundle\") pod \"6ea76cac-18f3-4d2b-baec-628d633a9f15\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.357134 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-util\") pod \"6ea76cac-18f3-4d2b-baec-628d633a9f15\" (UID: \"6ea76cac-18f3-4d2b-baec-628d633a9f15\") " Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.362776 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-bundle" (OuterVolumeSpecName: "bundle") pod "6ea76cac-18f3-4d2b-baec-628d633a9f15" (UID: "6ea76cac-18f3-4d2b-baec-628d633a9f15"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.365628 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea76cac-18f3-4d2b-baec-628d633a9f15-kube-api-access-bhznn" (OuterVolumeSpecName: "kube-api-access-bhznn") pod "6ea76cac-18f3-4d2b-baec-628d633a9f15" (UID: "6ea76cac-18f3-4d2b-baec-628d633a9f15"). InnerVolumeSpecName "kube-api-access-bhznn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.386548 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-util" (OuterVolumeSpecName: "util") pod "6ea76cac-18f3-4d2b-baec-628d633a9f15" (UID: "6ea76cac-18f3-4d2b-baec-628d633a9f15"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.458637 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhznn\" (UniqueName: \"kubernetes.io/projected/6ea76cac-18f3-4d2b-baec-628d633a9f15-kube-api-access-bhznn\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.458693 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.458713 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6ea76cac-18f3-4d2b-baec-628d633a9f15-util\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.832624 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" event={"ID":"6ea76cac-18f3-4d2b-baec-628d633a9f15","Type":"ContainerDied","Data":"85ed1219542d074be0a478fac12fb2c2926bf9d2566c2b59e55e33acda826efb"} Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.832690 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85ed1219542d074be0a478fac12fb2c2926bf9d2566c2b59e55e33acda826efb" Oct 09 00:17:29 crc kubenswrapper[4810]: I1009 00:17:29.832784 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.867605 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf"] Oct 09 00:17:30 crc kubenswrapper[4810]: E1009 00:17:30.868037 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="extract" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.868061 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="extract" Oct 09 00:17:30 crc kubenswrapper[4810]: E1009 00:17:30.868086 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="pull" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.868099 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="pull" Oct 09 00:17:30 crc kubenswrapper[4810]: E1009 00:17:30.868119 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="util" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.868132 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="util" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.868359 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ea76cac-18f3-4d2b-baec-628d633a9f15" containerName="extract" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.869759 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.873636 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.877713 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf"] Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.878975 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74kv8\" (UniqueName: \"kubernetes.io/projected/95a08308-84d4-4109-bb8c-245f2a80eb9e-kube-api-access-74kv8\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.879046 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.879125 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.980388 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74kv8\" (UniqueName: \"kubernetes.io/projected/95a08308-84d4-4109-bb8c-245f2a80eb9e-kube-api-access-74kv8\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.980517 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.980556 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.981235 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:30 crc kubenswrapper[4810]: I1009 00:17:30.981284 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.006798 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74kv8\" (UniqueName: \"kubernetes.io/projected/95a08308-84d4-4109-bb8c-245f2a80eb9e-kube-api-access-74kv8\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.203423 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.428548 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf"] Oct 09 00:17:31 crc kubenswrapper[4810]: W1009 00:17:31.434359 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95a08308_84d4_4109_bb8c_245f2a80eb9e.slice/crio-24b8c55fcc2235b7d65f5ff303b3e0bc693c2224faea6676a6684e51e9ce0a3b WatchSource:0}: Error finding container 24b8c55fcc2235b7d65f5ff303b3e0bc693c2224faea6676a6684e51e9ce0a3b: Status 404 returned error can't find the container with id 24b8c55fcc2235b7d65f5ff303b3e0bc693c2224faea6676a6684e51e9ce0a3b Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.644716 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t"] Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.646355 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.661199 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t"] Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.790289 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-bundle\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.790358 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zdgk\" (UniqueName: \"kubernetes.io/projected/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-kube-api-access-2zdgk\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.790454 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-util\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.848446 4810 generic.go:334] "Generic (PLEG): container finished" podID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerID="23236ec7e4d5eab95a0aca0c502ff556fb5ee74e28ade90a863ac0d29e6bbf5a" exitCode=0 Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.848492 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" event={"ID":"95a08308-84d4-4109-bb8c-245f2a80eb9e","Type":"ContainerDied","Data":"23236ec7e4d5eab95a0aca0c502ff556fb5ee74e28ade90a863ac0d29e6bbf5a"} Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.848515 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" event={"ID":"95a08308-84d4-4109-bb8c-245f2a80eb9e","Type":"ContainerStarted","Data":"24b8c55fcc2235b7d65f5ff303b3e0bc693c2224faea6676a6684e51e9ce0a3b"} Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.890984 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zdgk\" (UniqueName: \"kubernetes.io/projected/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-kube-api-access-2zdgk\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.891082 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-util\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: 
\"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.891108 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-bundle\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.891554 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-bundle\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.891818 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-util\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.911089 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zdgk\" (UniqueName: \"kubernetes.io/projected/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-kube-api-access-2zdgk\") pod \"dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:31 crc kubenswrapper[4810]: I1009 00:17:31.965076 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:32 crc kubenswrapper[4810]: I1009 00:17:32.404472 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t"] Oct 09 00:17:32 crc kubenswrapper[4810]: I1009 00:17:32.853914 4810 generic.go:334] "Generic (PLEG): container finished" podID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerID="deddbb779eac0a639714f6d66e48bda3b330675f068bc33cb65087064e13fb2d" exitCode=0 Oct 09 00:17:32 crc kubenswrapper[4810]: I1009 00:17:32.853966 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" event={"ID":"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5","Type":"ContainerDied","Data":"deddbb779eac0a639714f6d66e48bda3b330675f068bc33cb65087064e13fb2d"} Oct 09 00:17:32 crc kubenswrapper[4810]: I1009 00:17:32.854002 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" event={"ID":"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5","Type":"ContainerStarted","Data":"c6355cb471a8ef8225298da46fbfeb92e3a397b8f7353514a3e25bb1b26618ef"} Oct 09 00:17:33 crc kubenswrapper[4810]: I1009 00:17:33.858898 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" event={"ID":"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5","Type":"ContainerStarted","Data":"de91b3445247822d1a1ec76b8a96255662eb155a26b2572f07b098bbc2f87bdb"} Oct 09 00:17:34 crc kubenswrapper[4810]: I1009 00:17:34.865414 4810 generic.go:334] "Generic (PLEG): container finished" podID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerID="de91b3445247822d1a1ec76b8a96255662eb155a26b2572f07b098bbc2f87bdb" exitCode=0 Oct 09 00:17:34 crc kubenswrapper[4810]: I1009 00:17:34.865457 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" event={"ID":"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5","Type":"ContainerDied","Data":"de91b3445247822d1a1ec76b8a96255662eb155a26b2572f07b098bbc2f87bdb"} Oct 09 00:17:35 crc kubenswrapper[4810]: I1009 00:17:35.873436 4810 generic.go:334] "Generic (PLEG): container finished" podID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerID="cba5b4818fdb991f22e3ded2795e26181ed70f6e53d3459db2c48c36ad4906a9" exitCode=0 Oct 09 00:17:35 crc kubenswrapper[4810]: I1009 00:17:35.873721 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" event={"ID":"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5","Type":"ContainerDied","Data":"cba5b4818fdb991f22e3ded2795e26181ed70f6e53d3459db2c48c36ad4906a9"} Oct 09 00:17:36 crc kubenswrapper[4810]: I1009 00:17:36.880645 4810 generic.go:334] "Generic (PLEG): container finished" podID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerID="5c90056436bfc1d5d92866fe596945b945b54559aa64cef7390431cb0c7c1b07" exitCode=0 Oct 09 00:17:36 crc kubenswrapper[4810]: I1009 00:17:36.880737 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" event={"ID":"95a08308-84d4-4109-bb8c-245f2a80eb9e","Type":"ContainerDied","Data":"5c90056436bfc1d5d92866fe596945b945b54559aa64cef7390431cb0c7c1b07"} Oct 09 00:17:37 crc kubenswrapper[4810]: 
I1009 00:17:37.136847 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.267665 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-bundle\") pod \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.267740 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-util\") pod \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.267853 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zdgk\" (UniqueName: \"kubernetes.io/projected/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-kube-api-access-2zdgk\") pod \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\" (UID: \"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5\") " Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.268449 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-bundle" (OuterVolumeSpecName: "bundle") pod "6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" (UID: "6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.274087 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-kube-api-access-2zdgk" (OuterVolumeSpecName: "kube-api-access-2zdgk") pod "6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" (UID: "6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5"). InnerVolumeSpecName "kube-api-access-2zdgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.283410 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-util" (OuterVolumeSpecName: "util") pod "6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" (UID: "6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.370038 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zdgk\" (UniqueName: \"kubernetes.io/projected/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-kube-api-access-2zdgk\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.370079 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.370094 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5-util\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.888124 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" event={"ID":"6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5","Type":"ContainerDied","Data":"c6355cb471a8ef8225298da46fbfeb92e3a397b8f7353514a3e25bb1b26618ef"} Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.888442 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6355cb471a8ef8225298da46fbfeb92e3a397b8f7353514a3e25bb1b26618ef" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.888163 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t" Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.890754 4810 generic.go:334] "Generic (PLEG): container finished" podID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerID="7ec151c1d9ecc00d9f2553a6d06d5be3b917b3733037554ff81cacf6491d5074" exitCode=0 Oct 09 00:17:37 crc kubenswrapper[4810]: I1009 00:17:37.890855 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" event={"ID":"95a08308-84d4-4109-bb8c-245f2a80eb9e","Type":"ContainerDied","Data":"7ec151c1d9ecc00d9f2553a6d06d5be3b917b3733037554ff81cacf6491d5074"} Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.255449 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594"] Oct 09 00:17:38 crc kubenswrapper[4810]: E1009 00:17:38.255669 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="pull" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.255683 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="pull" Oct 09 00:17:38 crc kubenswrapper[4810]: E1009 00:17:38.255698 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="extract" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.255706 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="extract" Oct 09 00:17:38 crc kubenswrapper[4810]: E1009 00:17:38.255719 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="util" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.255727 4810 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="util" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.255842 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5" containerName="extract" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.256628 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.264960 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594"] Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.388164 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.388250 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.388415 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcs2w\" (UniqueName: \"kubernetes.io/projected/deccd7e9-1f1d-4950-90fb-05210cea2cff-kube-api-access-fcs2w\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.489712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.489860 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcs2w\" (UniqueName: \"kubernetes.io/projected/deccd7e9-1f1d-4950-90fb-05210cea2cff-kube-api-access-fcs2w\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.489928 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 
00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.490357 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-bundle\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.490618 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-util\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.507139 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcs2w\" (UniqueName: \"kubernetes.io/projected/deccd7e9-1f1d-4950-90fb-05210cea2cff-kube-api-access-fcs2w\") pod \"695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.573231 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.837394 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594"] Oct 09 00:17:38 crc kubenswrapper[4810]: W1009 00:17:38.842973 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddeccd7e9_1f1d_4950_90fb_05210cea2cff.slice/crio-fccc194eca0f8db2cf372c4e858c00866210fe52aff567b93a08fb7ca74d258c WatchSource:0}: Error finding container fccc194eca0f8db2cf372c4e858c00866210fe52aff567b93a08fb7ca74d258c: Status 404 returned error can't find the container with id fccc194eca0f8db2cf372c4e858c00866210fe52aff567b93a08fb7ca74d258c Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.902014 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-296t8"] Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.902671 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.903485 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" event={"ID":"deccd7e9-1f1d-4950-90fb-05210cea2cff","Type":"ContainerStarted","Data":"fccc194eca0f8db2cf372c4e858c00866210fe52aff567b93a08fb7ca74d258c"} Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.910459 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-l6l8t" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.910535 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.910667 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.925917 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-296t8"] Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.965250 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd"] Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.965867 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.974535 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-b6dnw" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.974783 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.979623 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg"] Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.980399 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:38 crc kubenswrapper[4810]: I1009 00:17:38.987516 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.027185 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.095650 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/79b6b416-cd91-46a8-9f07-6cd6e42ad85d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd\" (UID: \"79b6b416-cd91-46a8-9f07-6cd6e42ad85d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.095878 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/79b6b416-cd91-46a8-9f07-6cd6e42ad85d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd\" (UID: \"79b6b416-cd91-46a8-9f07-6cd6e42ad85d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.095932 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7wbd\" (UniqueName: \"kubernetes.io/projected/473feb2d-d624-4d00-8a07-965583adf996-kube-api-access-b7wbd\") pod \"obo-prometheus-operator-7c8cf85677-296t8\" (UID: \"473feb2d-d624-4d00-8a07-965583adf996\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.095962 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a70271d4-45c7-4ba1-a2ec-35b10e957709-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg\" (UID: \"a70271d4-45c7-4ba1-a2ec-35b10e957709\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.095999 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a70271d4-45c7-4ba1-a2ec-35b10e957709-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg\" (UID: \"a70271d4-45c7-4ba1-a2ec-35b10e957709\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.150368 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-qz8pk"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.161341 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.164327 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-bn667" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.164528 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.175437 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-qz8pk"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.199342 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7wbd\" (UniqueName: \"kubernetes.io/projected/473feb2d-d624-4d00-8a07-965583adf996-kube-api-access-b7wbd\") pod \"obo-prometheus-operator-7c8cf85677-296t8\" (UID: \"473feb2d-d624-4d00-8a07-965583adf996\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.199390 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a70271d4-45c7-4ba1-a2ec-35b10e957709-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg\" (UID: \"a70271d4-45c7-4ba1-a2ec-35b10e957709\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.199440 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a70271d4-45c7-4ba1-a2ec-35b10e957709-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg\" (UID: \"a70271d4-45c7-4ba1-a2ec-35b10e957709\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.199475 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/79b6b416-cd91-46a8-9f07-6cd6e42ad85d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd\" (UID: \"79b6b416-cd91-46a8-9f07-6cd6e42ad85d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.201176 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/79b6b416-cd91-46a8-9f07-6cd6e42ad85d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd\" (UID: \"79b6b416-cd91-46a8-9f07-6cd6e42ad85d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.204192 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/79b6b416-cd91-46a8-9f07-6cd6e42ad85d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd\" (UID: \"79b6b416-cd91-46a8-9f07-6cd6e42ad85d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.205384 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/79b6b416-cd91-46a8-9f07-6cd6e42ad85d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd\" (UID: \"79b6b416-cd91-46a8-9f07-6cd6e42ad85d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.205885 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a70271d4-45c7-4ba1-a2ec-35b10e957709-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg\" (UID: \"a70271d4-45c7-4ba1-a2ec-35b10e957709\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.207032 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a70271d4-45c7-4ba1-a2ec-35b10e957709-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg\" (UID: \"a70271d4-45c7-4ba1-a2ec-35b10e957709\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.221027 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7wbd\" (UniqueName: \"kubernetes.io/projected/473feb2d-d624-4d00-8a07-965583adf996-kube-api-access-b7wbd\") pod \"obo-prometheus-operator-7c8cf85677-296t8\" (UID: \"473feb2d-d624-4d00-8a07-965583adf996\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.229363 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.288789 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.299648 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.301876 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/67646063-cb29-41a3-ae59-baca62fca646-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-qz8pk\" (UID: \"67646063-cb29-41a3-ae59-baca62fca646\") " pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.302005 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffmht\" (UniqueName: \"kubernetes.io/projected/67646063-cb29-41a3-ae59-baca62fca646-kube-api-access-ffmht\") pod \"observability-operator-cc5f78dfc-qz8pk\" (UID: \"67646063-cb29-41a3-ae59-baca62fca646\") " pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.302983 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.333238 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-jdbpj"] Oct 09 00:17:39 crc kubenswrapper[4810]: E1009 00:17:39.333755 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="extract" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.333770 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="extract" Oct 09 00:17:39 crc kubenswrapper[4810]: E1009 00:17:39.333785 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="pull" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.333793 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="pull" Oct 09 00:17:39 crc kubenswrapper[4810]: E1009 00:17:39.333805 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="util" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.333813 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="util" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.333919 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="95a08308-84d4-4109-bb8c-245f2a80eb9e" containerName="extract" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.334259 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.336701 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-fmv8t" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.345196 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-jdbpj"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.403276 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/67646063-cb29-41a3-ae59-baca62fca646-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-qz8pk\" (UID: \"67646063-cb29-41a3-ae59-baca62fca646\") " pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.403327 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffmht\" (UniqueName: \"kubernetes.io/projected/67646063-cb29-41a3-ae59-baca62fca646-kube-api-access-ffmht\") pod \"observability-operator-cc5f78dfc-qz8pk\" (UID: \"67646063-cb29-41a3-ae59-baca62fca646\") " pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.407465 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/67646063-cb29-41a3-ae59-baca62fca646-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-qz8pk\" (UID: \"67646063-cb29-41a3-ae59-baca62fca646\") " pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.432615 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffmht\" (UniqueName: \"kubernetes.io/projected/67646063-cb29-41a3-ae59-baca62fca646-kube-api-access-ffmht\") pod \"observability-operator-cc5f78dfc-qz8pk\" (UID: \"67646063-cb29-41a3-ae59-baca62fca646\") " pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.480077 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.504168 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-util\") pod \"95a08308-84d4-4109-bb8c-245f2a80eb9e\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.504231 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74kv8\" (UniqueName: \"kubernetes.io/projected/95a08308-84d4-4109-bb8c-245f2a80eb9e-kube-api-access-74kv8\") pod \"95a08308-84d4-4109-bb8c-245f2a80eb9e\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.504275 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-bundle\") pod \"95a08308-84d4-4109-bb8c-245f2a80eb9e\" (UID: \"95a08308-84d4-4109-bb8c-245f2a80eb9e\") " Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.504431 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfbrl\" (UniqueName: \"kubernetes.io/projected/06e31d53-08d6-4cc9-9c5e-b2352bde041f-kube-api-access-pfbrl\") pod \"perses-operator-54bc95c9fb-jdbpj\" (UID: \"06e31d53-08d6-4cc9-9c5e-b2352bde041f\") " pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.504500 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/06e31d53-08d6-4cc9-9c5e-b2352bde041f-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-jdbpj\" (UID: \"06e31d53-08d6-4cc9-9c5e-b2352bde041f\") " pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.505326 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-bundle" (OuterVolumeSpecName: "bundle") pod "95a08308-84d4-4109-bb8c-245f2a80eb9e" (UID: "95a08308-84d4-4109-bb8c-245f2a80eb9e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.509955 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95a08308-84d4-4109-bb8c-245f2a80eb9e-kube-api-access-74kv8" (OuterVolumeSpecName: "kube-api-access-74kv8") pod "95a08308-84d4-4109-bb8c-245f2a80eb9e" (UID: "95a08308-84d4-4109-bb8c-245f2a80eb9e"). InnerVolumeSpecName "kube-api-access-74kv8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.540759 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-util" (OuterVolumeSpecName: "util") pod "95a08308-84d4-4109-bb8c-245f2a80eb9e" (UID: "95a08308-84d4-4109-bb8c-245f2a80eb9e"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.606281 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/06e31d53-08d6-4cc9-9c5e-b2352bde041f-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-jdbpj\" (UID: \"06e31d53-08d6-4cc9-9c5e-b2352bde041f\") " pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.606353 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfbrl\" (UniqueName: \"kubernetes.io/projected/06e31d53-08d6-4cc9-9c5e-b2352bde041f-kube-api-access-pfbrl\") pod \"perses-operator-54bc95c9fb-jdbpj\" (UID: \"06e31d53-08d6-4cc9-9c5e-b2352bde041f\") " pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.606423 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-util\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.606435 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74kv8\" (UniqueName: \"kubernetes.io/projected/95a08308-84d4-4109-bb8c-245f2a80eb9e-kube-api-access-74kv8\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.606447 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95a08308-84d4-4109-bb8c-245f2a80eb9e-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.607686 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/06e31d53-08d6-4cc9-9c5e-b2352bde041f-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-jdbpj\" (UID: \"06e31d53-08d6-4cc9-9c5e-b2352bde041f\") " pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.612595 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.619697 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg"] Oct 09 00:17:39 crc kubenswrapper[4810]: W1009 00:17:39.622193 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79b6b416_cd91_46a8_9f07_6cd6e42ad85d.slice/crio-caaa3db8c3e6f80c12b71f48b84338b5b36d01078ef662e857235199a89a58bf WatchSource:0}: Error finding container caaa3db8c3e6f80c12b71f48b84338b5b36d01078ef662e857235199a89a58bf: Status 404 returned error can't find the container with id caaa3db8c3e6f80c12b71f48b84338b5b36d01078ef662e857235199a89a58bf Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.627975 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pfbrl\" (UniqueName: \"kubernetes.io/projected/06e31d53-08d6-4cc9-9c5e-b2352bde041f-kube-api-access-pfbrl\") pod \"perses-operator-54bc95c9fb-jdbpj\" (UID: \"06e31d53-08d6-4cc9-9c5e-b2352bde041f\") " pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: W1009 00:17:39.628421 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda70271d4_45c7_4ba1_a2ec_35b10e957709.slice/crio-dcac052c34ba0eab19a860ec768e99d7a21a4a46e18ad25b299e9516197cda98 WatchSource:0}: Error finding container dcac052c34ba0eab19a860ec768e99d7a21a4a46e18ad25b299e9516197cda98: Status 404 returned error can't find the container with id dcac052c34ba0eab19a860ec768e99d7a21a4a46e18ad25b299e9516197cda98 Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.658117 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.694035 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-296t8"] Oct 09 00:17:39 crc kubenswrapper[4810]: W1009 00:17:39.700942 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod473feb2d_d624_4d00_8a07_965583adf996.slice/crio-6f4d153c3fed09a3a0a697498c5d1976ac0bba506c55b9fe57e1e092104ffd49 WatchSource:0}: Error finding container 6f4d153c3fed09a3a0a697498c5d1976ac0bba506c55b9fe57e1e092104ffd49: Status 404 returned error can't find the container with id 6f4d153c3fed09a3a0a697498c5d1976ac0bba506c55b9fe57e1e092104ffd49 Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.738837 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-qz8pk"] Oct 09 00:17:39 crc kubenswrapper[4810]: W1009 00:17:39.742992 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67646063_cb29_41a3_ae59_baca62fca646.slice/crio-01dbb88e83ee6858b183bdeea4157c152587945c480958803d9057c2ca7e735b WatchSource:0}: Error finding container 01dbb88e83ee6858b183bdeea4157c152587945c480958803d9057c2ca7e735b: Status 404 returned error can't find the container with id 01dbb88e83ee6858b183bdeea4157c152587945c480958803d9057c2ca7e735b Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.855113 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-jdbpj"] Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.910281 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" event={"ID":"79b6b416-cd91-46a8-9f07-6cd6e42ad85d","Type":"ContainerStarted","Data":"caaa3db8c3e6f80c12b71f48b84338b5b36d01078ef662e857235199a89a58bf"} Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.911144 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" event={"ID":"06e31d53-08d6-4cc9-9c5e-b2352bde041f","Type":"ContainerStarted","Data":"fd43b9a573baec06ae3758db299c0474b3812b2397ea990e69375601b539d8d1"} Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.913270 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" event={"ID":"95a08308-84d4-4109-bb8c-245f2a80eb9e","Type":"ContainerDied","Data":"24b8c55fcc2235b7d65f5ff303b3e0bc693c2224faea6676a6684e51e9ce0a3b"} Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.913313 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24b8c55fcc2235b7d65f5ff303b3e0bc693c2224faea6676a6684e51e9ce0a3b" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.913366 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf" Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.921468 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" event={"ID":"67646063-cb29-41a3-ae59-baca62fca646","Type":"ContainerStarted","Data":"01dbb88e83ee6858b183bdeea4157c152587945c480958803d9057c2ca7e735b"} Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.922833 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" event={"ID":"a70271d4-45c7-4ba1-a2ec-35b10e957709","Type":"ContainerStarted","Data":"dcac052c34ba0eab19a860ec768e99d7a21a4a46e18ad25b299e9516197cda98"} Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.924708 4810 generic.go:334] "Generic (PLEG): container finished" podID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerID="3236b89c8863068da452f6932b5790e5f9dbbd20c2c173827f4bc6f490fd331e" exitCode=0 Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.924969 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" event={"ID":"deccd7e9-1f1d-4950-90fb-05210cea2cff","Type":"ContainerDied","Data":"3236b89c8863068da452f6932b5790e5f9dbbd20c2c173827f4bc6f490fd331e"} Oct 09 00:17:39 crc kubenswrapper[4810]: I1009 00:17:39.934022 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" event={"ID":"473feb2d-d624-4d00-8a07-965583adf996","Type":"ContainerStarted","Data":"6f4d153c3fed09a3a0a697498c5d1976ac0bba506c55b9fe57e1e092104ffd49"} Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.143018 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-vz7f2"] Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.144271 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.148095 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.148177 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-q8k7h" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.148363 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.158630 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-vz7f2"] Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.315434 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzt4j\" (UniqueName: \"kubernetes.io/projected/e3af9963-efac-4963-9512-34c76de2cf84-kube-api-access-rzt4j\") pod \"interconnect-operator-5bb49f789d-vz7f2\" (UID: \"e3af9963-efac-4963-9512-34c76de2cf84\") " pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.417147 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzt4j\" (UniqueName: \"kubernetes.io/projected/e3af9963-efac-4963-9512-34c76de2cf84-kube-api-access-rzt4j\") pod \"interconnect-operator-5bb49f789d-vz7f2\" (UID: \"e3af9963-efac-4963-9512-34c76de2cf84\") " pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.439386 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzt4j\" (UniqueName: \"kubernetes.io/projected/e3af9963-efac-4963-9512-34c76de2cf84-kube-api-access-rzt4j\") pod \"interconnect-operator-5bb49f789d-vz7f2\" (UID: \"e3af9963-efac-4963-9512-34c76de2cf84\") " pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" Oct 09 00:17:47 crc kubenswrapper[4810]: I1009 00:17:47.465295 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.475690 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-6df48f7c5c-6ll6s"] Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.476501 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.478917 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-ffqk5" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.479506 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.487888 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-6df48f7c5c-6ll6s"] Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.646353 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/be91caeb-c663-4619-8888-f4421774cf3a-webhook-cert\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.646413 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/be91caeb-c663-4619-8888-f4421774cf3a-apiservice-cert\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.646526 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rpfd\" (UniqueName: \"kubernetes.io/projected/be91caeb-c663-4619-8888-f4421774cf3a-kube-api-access-4rpfd\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.747848 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rpfd\" (UniqueName: \"kubernetes.io/projected/be91caeb-c663-4619-8888-f4421774cf3a-kube-api-access-4rpfd\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.747906 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/be91caeb-c663-4619-8888-f4421774cf3a-webhook-cert\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.747932 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/be91caeb-c663-4619-8888-f4421774cf3a-apiservice-cert\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.753245 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/be91caeb-c663-4619-8888-f4421774cf3a-apiservice-cert\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 
crc kubenswrapper[4810]: I1009 00:17:49.753286 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/be91caeb-c663-4619-8888-f4421774cf3a-webhook-cert\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.773758 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rpfd\" (UniqueName: \"kubernetes.io/projected/be91caeb-c663-4619-8888-f4421774cf3a-kube-api-access-4rpfd\") pod \"elastic-operator-6df48f7c5c-6ll6s\" (UID: \"be91caeb-c663-4619-8888-f4421774cf3a\") " pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:49 crc kubenswrapper[4810]: I1009 00:17:49.796362 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" Oct 09 00:17:55 crc kubenswrapper[4810]: E1009 00:17:55.879178 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5" Oct 09 00:17:55 crc kubenswrapper[4810]: E1009 00:17:55.879844 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:8597c48fc71fc6ec8e87dbe40dace4dbb7b817c1039db608af76a0d90f7ac2d0,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.2.2,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b7wbd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-7c8cf85677-296t8_openshift-operators(473feb2d-d624-4d00-8a07-965583adf996): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 09 00:17:55 crc kubenswrapper[4810]: E1009 00:17:55.881169 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" podUID="473feb2d-d624-4d00-8a07-965583adf996" Oct 09 00:17:56 crc kubenswrapper[4810]: E1009 00:17:56.074389 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e2681bce57dc9c15701f5591532c2dfe8f19778606661339553a28dc003dbca5\\\"\"" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" podUID="473feb2d-d624-4d00-8a07-965583adf996" Oct 09 00:17:56 crc kubenswrapper[4810]: I1009 00:17:56.175764 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-6df48f7c5c-6ll6s"] Oct 09 00:17:56 crc kubenswrapper[4810]: I1009 00:17:56.216044 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-vz7f2"] Oct 09 00:17:56 crc kubenswrapper[4810]: W1009 00:17:56.227652 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3af9963_efac_4963_9512_34c76de2cf84.slice/crio-cac8b54c6023af5519ca66a5a25c1880e22f249c3682bc0b6f7fe5cbd206cea5 WatchSource:0}: Error finding container cac8b54c6023af5519ca66a5a25c1880e22f249c3682bc0b6f7fe5cbd206cea5: Status 404 returned error can't find the container with id cac8b54c6023af5519ca66a5a25c1880e22f249c3682bc0b6f7fe5cbd206cea5 Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.055980 4810 generic.go:334] "Generic (PLEG): container finished" podID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerID="d27aeac4ac3ce219b50fa90df927d93275a8dddae1447875491b9736a98e7ec6" exitCode=0 Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.056020 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" 
event={"ID":"deccd7e9-1f1d-4950-90fb-05210cea2cff","Type":"ContainerDied","Data":"d27aeac4ac3ce219b50fa90df927d93275a8dddae1447875491b9736a98e7ec6"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.058700 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" event={"ID":"79b6b416-cd91-46a8-9f07-6cd6e42ad85d","Type":"ContainerStarted","Data":"58382ca70127bfebe54be9ad98a18f6a6e3070150b773e91abaf0201bcf384ba"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.060063 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" event={"ID":"06e31d53-08d6-4cc9-9c5e-b2352bde041f","Type":"ContainerStarted","Data":"5c6e0ec5b1842d9fbab57375d3ec09629048ed017e88eb7a4b476af5c1e23d7d"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.060185 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.060836 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" event={"ID":"be91caeb-c663-4619-8888-f4421774cf3a","Type":"ContainerStarted","Data":"9fca15e89e71173d35dab87fa4d425a0386094699e4b91620a15d484cf42df18"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.062585 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" event={"ID":"67646063-cb29-41a3-ae59-baca62fca646","Type":"ContainerStarted","Data":"af4b13bedb72e266025a39cd115b5e7828de66f0835030a1a934ddd366e26687"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.063454 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.064225 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" event={"ID":"e3af9963-efac-4963-9512-34c76de2cf84","Type":"ContainerStarted","Data":"cac8b54c6023af5519ca66a5a25c1880e22f249c3682bc0b6f7fe5cbd206cea5"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.064923 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.069475 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" event={"ID":"a70271d4-45c7-4ba1-a2ec-35b10e957709","Type":"ContainerStarted","Data":"0bc6a17e53aa6db66bd01172d088f956c95d6138894a64bda271bb30353f5302"} Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.127564 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd" podStartSLOduration=2.9179012909999997 podStartE2EDuration="19.127543781s" podCreationTimestamp="2025-10-09 00:17:38 +0000 UTC" firstStartedPulling="2025-10-09 00:17:39.625605209 +0000 UTC m=+657.151243910" lastFinishedPulling="2025-10-09 00:17:55.835247699 +0000 UTC m=+673.360886400" observedRunningTime="2025-10-09 00:17:57.121087534 +0000 UTC m=+674.646726255" watchObservedRunningTime="2025-10-09 00:17:57.127543781 +0000 UTC m=+674.653182502" Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.127738 4810 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" podStartSLOduration=2.058977863 podStartE2EDuration="18.127731837s" podCreationTimestamp="2025-10-09 00:17:39 +0000 UTC" firstStartedPulling="2025-10-09 00:17:39.862320537 +0000 UTC m=+657.387959238" lastFinishedPulling="2025-10-09 00:17:55.931074511 +0000 UTC m=+673.456713212" observedRunningTime="2025-10-09 00:17:57.097946125 +0000 UTC m=+674.623584836" watchObservedRunningTime="2025-10-09 00:17:57.127731837 +0000 UTC m=+674.653370548" Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.173872 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg" podStartSLOduration=2.87637 podStartE2EDuration="19.17380909s" podCreationTimestamp="2025-10-09 00:17:38 +0000 UTC" firstStartedPulling="2025-10-09 00:17:39.633711274 +0000 UTC m=+657.159349975" lastFinishedPulling="2025-10-09 00:17:55.931150364 +0000 UTC m=+673.456789065" observedRunningTime="2025-10-09 00:17:57.146240282 +0000 UTC m=+674.671878983" watchObservedRunningTime="2025-10-09 00:17:57.17380909 +0000 UTC m=+674.699447801" Oct 09 00:17:57 crc kubenswrapper[4810]: I1009 00:17:57.178007 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-qz8pk" podStartSLOduration=1.9916713750000001 podStartE2EDuration="18.1779846s" podCreationTimestamp="2025-10-09 00:17:39 +0000 UTC" firstStartedPulling="2025-10-09 00:17:39.744389386 +0000 UTC m=+657.270028087" lastFinishedPulling="2025-10-09 00:17:55.930702611 +0000 UTC m=+673.456341312" observedRunningTime="2025-10-09 00:17:57.173317255 +0000 UTC m=+674.698955986" watchObservedRunningTime="2025-10-09 00:17:57.1779846 +0000 UTC m=+674.703623301" Oct 09 00:17:58 crc kubenswrapper[4810]: I1009 00:17:58.076165 4810 generic.go:334] "Generic (PLEG): container finished" podID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerID="a4a31535bdbc2f749efa03510b31a0e3356bdc7a12d603025a62be3deb2f2b29" exitCode=0 Oct 09 00:17:58 crc kubenswrapper[4810]: I1009 00:17:58.077192 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" event={"ID":"deccd7e9-1f1d-4950-90fb-05210cea2cff","Type":"ContainerDied","Data":"a4a31535bdbc2f749efa03510b31a0e3356bdc7a12d603025a62be3deb2f2b29"} Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.717014 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.824506 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-bundle\") pod \"deccd7e9-1f1d-4950-90fb-05210cea2cff\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.824786 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-util\") pod \"deccd7e9-1f1d-4950-90fb-05210cea2cff\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.824837 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcs2w\" (UniqueName: \"kubernetes.io/projected/deccd7e9-1f1d-4950-90fb-05210cea2cff-kube-api-access-fcs2w\") pod \"deccd7e9-1f1d-4950-90fb-05210cea2cff\" (UID: \"deccd7e9-1f1d-4950-90fb-05210cea2cff\") " Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.825768 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-bundle" (OuterVolumeSpecName: "bundle") pod "deccd7e9-1f1d-4950-90fb-05210cea2cff" (UID: "deccd7e9-1f1d-4950-90fb-05210cea2cff"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.837989 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deccd7e9-1f1d-4950-90fb-05210cea2cff-kube-api-access-fcs2w" (OuterVolumeSpecName: "kube-api-access-fcs2w") pod "deccd7e9-1f1d-4950-90fb-05210cea2cff" (UID: "deccd7e9-1f1d-4950-90fb-05210cea2cff"). InnerVolumeSpecName "kube-api-access-fcs2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.844065 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-util" (OuterVolumeSpecName: "util") pod "deccd7e9-1f1d-4950-90fb-05210cea2cff" (UID: "deccd7e9-1f1d-4950-90fb-05210cea2cff"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.929783 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.929813 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/deccd7e9-1f1d-4950-90fb-05210cea2cff-util\") on node \"crc\" DevicePath \"\"" Oct 09 00:17:59 crc kubenswrapper[4810]: I1009 00:17:59.929840 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcs2w\" (UniqueName: \"kubernetes.io/projected/deccd7e9-1f1d-4950-90fb-05210cea2cff-kube-api-access-fcs2w\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:00 crc kubenswrapper[4810]: I1009 00:18:00.089412 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" event={"ID":"be91caeb-c663-4619-8888-f4421774cf3a","Type":"ContainerStarted","Data":"ebbd0110a7468165fc5b2620292c4d4f7cf14bcb27f8943ea47ee1f0deecc000"} Oct 09 00:18:00 crc kubenswrapper[4810]: I1009 00:18:00.092148 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" event={"ID":"deccd7e9-1f1d-4950-90fb-05210cea2cff","Type":"ContainerDied","Data":"fccc194eca0f8db2cf372c4e858c00866210fe52aff567b93a08fb7ca74d258c"} Oct 09 00:18:00 crc kubenswrapper[4810]: I1009 00:18:00.092172 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fccc194eca0f8db2cf372c4e858c00866210fe52aff567b93a08fb7ca74d258c" Oct 09 00:18:00 crc kubenswrapper[4810]: I1009 00:18:00.092232 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594" Oct 09 00:18:00 crc kubenswrapper[4810]: I1009 00:18:00.111535 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-6df48f7c5c-6ll6s" podStartSLOduration=7.571322911 podStartE2EDuration="11.111507449s" podCreationTimestamp="2025-10-09 00:17:49 +0000 UTC" firstStartedPulling="2025-10-09 00:17:56.186092217 +0000 UTC m=+673.711730918" lastFinishedPulling="2025-10-09 00:17:59.726276765 +0000 UTC m=+677.251915456" observedRunningTime="2025-10-09 00:18:00.106990958 +0000 UTC m=+677.632629659" watchObservedRunningTime="2025-10-09 00:18:00.111507449 +0000 UTC m=+677.637146170" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.488022 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 09 00:18:01 crc kubenswrapper[4810]: E1009 00:18:01.488487 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="pull" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.488505 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="pull" Oct 09 00:18:01 crc kubenswrapper[4810]: E1009 00:18:01.488640 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="util" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.488647 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="util" Oct 09 00:18:01 crc kubenswrapper[4810]: E1009 00:18:01.488659 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="extract" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.488665 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="extract" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.488771 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="deccd7e9-1f1d-4950-90fb-05210cea2cff" containerName="extract" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.489495 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.491711 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.492119 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-4gdm9" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.492247 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.492384 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.492515 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.494924 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.495176 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.495388 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.495512 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.508983 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.649815 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/da88a141-8779-4b30-9ea7-3477d4ad9ee5-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.649874 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.649910 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.649931 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: 
\"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.649949 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.649970 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650000 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650121 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650144 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650173 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650189 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650207 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-xpack-file-realm\") pod 
\"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650225 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650251 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.650281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.751297 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.751913 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.751971 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752020 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752056 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-http-certificates\") pod 
\"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752081 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752107 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752133 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752168 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752200 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/da88a141-8779-4b30-9ea7-3477d4ad9ee5-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752217 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752243 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752265 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 
00:18:01.752289 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.752316 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.753276 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.753347 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.753329 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.753451 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.754345 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.754552 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.754580 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elasticsearch-logs\") pod 
\"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.755016 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.757809 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/da88a141-8779-4b30-9ea7-3477d4ad9ee5-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.757971 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.758433 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.758918 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.759400 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.764456 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.766656 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/da88a141-8779-4b30-9ea7-3477d4ad9ee5-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"da88a141-8779-4b30-9ea7-3477d4ad9ee5\") " pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:01 crc kubenswrapper[4810]: I1009 00:18:01.805191 4810 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:05 crc kubenswrapper[4810]: I1009 00:18:05.967162 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 09 00:18:05 crc kubenswrapper[4810]: W1009 00:18:05.976301 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda88a141_8779_4b30_9ea7_3477d4ad9ee5.slice/crio-71a7fdfa6d73904c3c802a2a8281791c03ac149cb87611fbeb51efa4973f4ec4 WatchSource:0}: Error finding container 71a7fdfa6d73904c3c802a2a8281791c03ac149cb87611fbeb51efa4973f4ec4: Status 404 returned error can't find the container with id 71a7fdfa6d73904c3c802a2a8281791c03ac149cb87611fbeb51efa4973f4ec4 Oct 09 00:18:06 crc kubenswrapper[4810]: I1009 00:18:06.127176 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" event={"ID":"e3af9963-efac-4963-9512-34c76de2cf84","Type":"ContainerStarted","Data":"9f95e241e79fad73567c283c072d3a58b4d2a3df3b08c360f7386507288dc094"} Oct 09 00:18:06 crc kubenswrapper[4810]: I1009 00:18:06.128285 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"da88a141-8779-4b30-9ea7-3477d4ad9ee5","Type":"ContainerStarted","Data":"71a7fdfa6d73904c3c802a2a8281791c03ac149cb87611fbeb51efa4973f4ec4"} Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.666686 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-jdbpj" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.683272 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-vz7f2" podStartSLOduration=13.141143516 podStartE2EDuration="22.683255913s" podCreationTimestamp="2025-10-09 00:17:47 +0000 UTC" firstStartedPulling="2025-10-09 00:17:56.23389448 +0000 UTC m=+673.759533181" lastFinishedPulling="2025-10-09 00:18:05.776006877 +0000 UTC m=+683.301645578" observedRunningTime="2025-10-09 00:18:06.149211583 +0000 UTC m=+683.674850284" watchObservedRunningTime="2025-10-09 00:18:09.683255913 +0000 UTC m=+687.208894624" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.742736 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.743937 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.746459 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-ca" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.746628 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.748157 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-sys-config" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.749910 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-global-ca" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.754174 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.789514 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.789904 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.789936 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.789972 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.789996 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790139 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: 
\"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790196 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmxfz\" (UniqueName: \"kubernetes.io/projected/d67e20d4-b292-4b9d-9674-8444c8912941-kube-api-access-dmxfz\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790266 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790284 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790304 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790428 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.790473 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891579 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891619 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " 
pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891649 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891675 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891695 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891715 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmxfz\" (UniqueName: \"kubernetes.io/projected/d67e20d4-b292-4b9d-9674-8444c8912941-kube-api-access-dmxfz\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891745 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891762 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891779 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891800 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891831 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.891859 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892220 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892254 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892312 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892347 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892583 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892615 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892755 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " 
pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.892936 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.893185 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.902607 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.911485 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmxfz\" (UniqueName: \"kubernetes.io/projected/d67e20d4-b292-4b9d-9674-8444c8912941-kube-api-access-dmxfz\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:09 crc kubenswrapper[4810]: I1009 00:18:09.913194 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-1-build\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:10 crc kubenswrapper[4810]: I1009 00:18:10.070935 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:11 crc kubenswrapper[4810]: I1009 00:18:11.210501 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 09 00:18:12 crc kubenswrapper[4810]: I1009 00:18:12.171327 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" event={"ID":"473feb2d-d624-4d00-8a07-965583adf996","Type":"ContainerStarted","Data":"fc9ec6eefb69aaa6d8b966ced1dd023baefd6f58509fa2e27399dc5ddfda4c9a"} Oct 09 00:18:12 crc kubenswrapper[4810]: I1009 00:18:12.174171 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"d67e20d4-b292-4b9d-9674-8444c8912941","Type":"ContainerStarted","Data":"3fd729c64442d039a440db30ebd3562835c727e4c1e9c0dd5906a865499318df"} Oct 09 00:18:12 crc kubenswrapper[4810]: I1009 00:18:12.191405 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-296t8" podStartSLOduration=2.807720355 podStartE2EDuration="34.191388976s" podCreationTimestamp="2025-10-09 00:17:38 +0000 UTC" firstStartedPulling="2025-10-09 00:17:39.705970074 +0000 UTC m=+657.231608775" lastFinishedPulling="2025-10-09 00:18:11.089638695 +0000 UTC m=+688.615277396" observedRunningTime="2025-10-09 00:18:12.190092749 +0000 UTC m=+689.715731480" watchObservedRunningTime="2025-10-09 00:18:12.191388976 +0000 UTC m=+689.717027667" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.640552 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7"] Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.641678 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.643773 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-5xm5j" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.644019 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.644124 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.661838 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7"] Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.738086 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.825898 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rv86\" (UniqueName: \"kubernetes.io/projected/c2d3ad77-5afa-4a99-bec0-a55f52d1fa51-kube-api-access-5rv86\") pod \"cert-manager-operator-controller-manager-96d66748b-pngw7\" (UID: \"c2d3ad77-5afa-4a99-bec0-a55f52d1fa51\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.926916 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rv86\" (UniqueName: \"kubernetes.io/projected/c2d3ad77-5afa-4a99-bec0-a55f52d1fa51-kube-api-access-5rv86\") pod \"cert-manager-operator-controller-manager-96d66748b-pngw7\" (UID: \"c2d3ad77-5afa-4a99-bec0-a55f52d1fa51\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.966625 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rv86\" (UniqueName: \"kubernetes.io/projected/c2d3ad77-5afa-4a99-bec0-a55f52d1fa51-kube-api-access-5rv86\") pod \"cert-manager-operator-controller-manager-96d66748b-pngw7\" (UID: \"c2d3ad77-5afa-4a99-bec0-a55f52d1fa51\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" Oct 09 00:18:19 crc kubenswrapper[4810]: I1009 00:18:19.968256 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.184696 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.185043 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.447249 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.448464 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.451262 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-sys-config" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.452153 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-global-ca" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.452376 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-ca" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468035 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468126 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468165 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468194 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " 
pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468232 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468266 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwd5m\" (UniqueName: \"kubernetes.io/projected/5555ec6f-be4d-40a7-81b7-d542809f29ca-kube-api-access-xwd5m\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468393 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468423 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468449 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468466 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468469 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.468512 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: 
\"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569295 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569359 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569384 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569404 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569434 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569460 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwd5m\" (UniqueName: \"kubernetes.io/projected/5555ec6f-be4d-40a7-81b7-d542809f29ca-kube-api-access-xwd5m\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569480 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569497 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " 
pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569514 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569513 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569561 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569534 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569612 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569970 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.570108 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.569652 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.570362 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: 
\"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.570575 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.570978 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.571035 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.572851 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.572853 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.573473 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.589310 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwd5m\" (UniqueName: \"kubernetes.io/projected/5555ec6f-be4d-40a7-81b7-d542809f29ca-kube-api-access-xwd5m\") pod \"service-telemetry-operator-2-build\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:21 crc kubenswrapper[4810]: I1009 00:18:21.765745 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:18:22 crc kubenswrapper[4810]: I1009 00:18:22.628959 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Oct 09 00:18:22 crc kubenswrapper[4810]: W1009 00:18:22.636738 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5555ec6f_be4d_40a7_81b7_d542809f29ca.slice/crio-386f3e98000847d0ef2318c0a094575bcbba6f94c6dff30e623b52d905d0baae WatchSource:0}: Error finding container 386f3e98000847d0ef2318c0a094575bcbba6f94c6dff30e623b52d905d0baae: Status 404 returned error can't find the container with id 386f3e98000847d0ef2318c0a094575bcbba6f94c6dff30e623b52d905d0baae Oct 09 00:18:22 crc kubenswrapper[4810]: I1009 00:18:22.683692 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7"] Oct 09 00:18:22 crc kubenswrapper[4810]: W1009 00:18:22.685794 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2d3ad77_5afa_4a99_bec0_a55f52d1fa51.slice/crio-a0765cc2aa12ed143bb7ee5d0875cb7db217c1fa75da04ae7d61649617338580 WatchSource:0}: Error finding container a0765cc2aa12ed143bb7ee5d0875cb7db217c1fa75da04ae7d61649617338580: Status 404 returned error can't find the container with id a0765cc2aa12ed143bb7ee5d0875cb7db217c1fa75da04ae7d61649617338580 Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.232046 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"da88a141-8779-4b30-9ea7-3477d4ad9ee5","Type":"ContainerStarted","Data":"ff71afc6c97c4ec479cfbe88810104b316643702d7cc77427ee59bada7875ff8"} Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.235091 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" event={"ID":"c2d3ad77-5afa-4a99-bec0-a55f52d1fa51","Type":"ContainerStarted","Data":"a0765cc2aa12ed143bb7ee5d0875cb7db217c1fa75da04ae7d61649617338580"} Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.236290 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerStarted","Data":"8e525774766a53a552d44851f48f15b6f618caf7370f2aaea1a39470189a05db"} Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.236322 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerStarted","Data":"386f3e98000847d0ef2318c0a094575bcbba6f94c6dff30e623b52d905d0baae"} Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.240182 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"d67e20d4-b292-4b9d-9674-8444c8912941","Type":"ContainerStarted","Data":"bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758"} Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.240294 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-1-build" podUID="d67e20d4-b292-4b9d-9674-8444c8912941" containerName="manage-dockerfile" 
containerID="cri-o://bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758" gracePeriod=30 Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.603786 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_d67e20d4-b292-4b9d-9674-8444c8912941/manage-dockerfile/0.log" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.604103 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703257 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-buildcachedir\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703319 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-run\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703350 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-ca-bundles\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703387 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmxfz\" (UniqueName: \"kubernetes.io/projected/d67e20d4-b292-4b9d-9674-8444c8912941-kube-api-access-dmxfz\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703391 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703418 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-proxy-ca-bundles\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703441 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-build-blob-cache\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703472 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-root\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703541 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-pull\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703567 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-buildworkdir\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703621 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-system-configs\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703664 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-node-pullsecrets\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703688 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-push\") pod \"d67e20d4-b292-4b9d-9674-8444c8912941\" (UID: \"d67e20d4-b292-4b9d-9674-8444c8912941\") " Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.703735 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704001 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704019 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704208 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704229 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704497 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704689 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704702 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704809 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.704773 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.708899 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.709076 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.716931 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d67e20d4-b292-4b9d-9674-8444c8912941-kube-api-access-dmxfz" (OuterVolumeSpecName: "kube-api-access-dmxfz") pod "d67e20d4-b292-4b9d-9674-8444c8912941" (UID: "d67e20d4-b292-4b9d-9674-8444c8912941"). InnerVolumeSpecName "kube-api-access-dmxfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806478 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806508 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmxfz\" (UniqueName: \"kubernetes.io/projected/d67e20d4-b292-4b9d-9674-8444c8912941-kube-api-access-dmxfz\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806523 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806534 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806546 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806558 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806567 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/d67e20d4-b292-4b9d-9674-8444c8912941-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806575 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/d67e20d4-b292-4b9d-9674-8444c8912941-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806583 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d67e20d4-b292-4b9d-9674-8444c8912941-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.806602 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/d67e20d4-b292-4b9d-9674-8444c8912941-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.808137 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 09 00:18:23 crc kubenswrapper[4810]: I1009 00:18:23.846800 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.247507 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_d67e20d4-b292-4b9d-9674-8444c8912941/manage-dockerfile/0.log" Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.247570 4810 generic.go:334] "Generic (PLEG): container finished" 
podID="d67e20d4-b292-4b9d-9674-8444c8912941" containerID="bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758" exitCode=1 Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.247683 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.247745 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"d67e20d4-b292-4b9d-9674-8444c8912941","Type":"ContainerDied","Data":"bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758"} Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.247790 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"d67e20d4-b292-4b9d-9674-8444c8912941","Type":"ContainerDied","Data":"3fd729c64442d039a440db30ebd3562835c727e4c1e9c0dd5906a865499318df"} Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.247853 4810 scope.go:117] "RemoveContainer" containerID="bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758" Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.267138 4810 scope.go:117] "RemoveContainer" containerID="bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758" Oct 09 00:18:24 crc kubenswrapper[4810]: E1009 00:18:24.267714 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758\": container with ID starting with bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758 not found: ID does not exist" containerID="bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758" Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.267861 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758"} err="failed to get container status \"bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758\": rpc error: code = NotFound desc = could not find container \"bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758\": container with ID starting with bf9042b210088d65e203418bac8be472e9772b6905622c131571262dfddda758 not found: ID does not exist" Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.275196 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 09 00:18:24 crc kubenswrapper[4810]: I1009 00:18:24.286129 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Oct 09 00:18:25 crc kubenswrapper[4810]: I1009 00:18:25.256354 4810 generic.go:334] "Generic (PLEG): container finished" podID="da88a141-8779-4b30-9ea7-3477d4ad9ee5" containerID="ff71afc6c97c4ec479cfbe88810104b316643702d7cc77427ee59bada7875ff8" exitCode=0 Oct 09 00:18:25 crc kubenswrapper[4810]: I1009 00:18:25.270402 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d67e20d4-b292-4b9d-9674-8444c8912941" path="/var/lib/kubelet/pods/d67e20d4-b292-4b9d-9674-8444c8912941/volumes" Oct 09 00:18:25 crc kubenswrapper[4810]: I1009 00:18:25.270789 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" 
event={"ID":"da88a141-8779-4b30-9ea7-3477d4ad9ee5","Type":"ContainerDied","Data":"ff71afc6c97c4ec479cfbe88810104b316643702d7cc77427ee59bada7875ff8"} Oct 09 00:18:30 crc kubenswrapper[4810]: I1009 00:18:30.283036 4810 generic.go:334] "Generic (PLEG): container finished" podID="da88a141-8779-4b30-9ea7-3477d4ad9ee5" containerID="0363f6610338622c8da64c90b451bd6a4472b291262e48599b0c494ebf6ce282" exitCode=0 Oct 09 00:18:30 crc kubenswrapper[4810]: I1009 00:18:30.283392 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"da88a141-8779-4b30-9ea7-3477d4ad9ee5","Type":"ContainerDied","Data":"0363f6610338622c8da64c90b451bd6a4472b291262e48599b0c494ebf6ce282"} Oct 09 00:18:30 crc kubenswrapper[4810]: I1009 00:18:30.285249 4810 generic.go:334] "Generic (PLEG): container finished" podID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerID="8e525774766a53a552d44851f48f15b6f618caf7370f2aaea1a39470189a05db" exitCode=0 Oct 09 00:18:30 crc kubenswrapper[4810]: I1009 00:18:30.285274 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerDied","Data":"8e525774766a53a552d44851f48f15b6f618caf7370f2aaea1a39470189a05db"} Oct 09 00:18:31 crc kubenswrapper[4810]: I1009 00:18:31.292612 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerStarted","Data":"1ab4bdad984257bc2ec169b7a381734e84d00a51ec3a2dfec384a40c4c07e6a5"} Oct 09 00:18:32 crc kubenswrapper[4810]: I1009 00:18:32.299339 4810 generic.go:334] "Generic (PLEG): container finished" podID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerID="1ab4bdad984257bc2ec169b7a381734e84d00a51ec3a2dfec384a40c4c07e6a5" exitCode=0 Oct 09 00:18:32 crc kubenswrapper[4810]: I1009 00:18:32.299656 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerDied","Data":"1ab4bdad984257bc2ec169b7a381734e84d00a51ec3a2dfec384a40c4c07e6a5"} Oct 09 00:18:32 crc kubenswrapper[4810]: I1009 00:18:32.307195 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"da88a141-8779-4b30-9ea7-3477d4ad9ee5","Type":"ContainerStarted","Data":"9d5c7d6ae63cc3b9a7840bc58a9ba734a94a31bf5ea1e4224b7f9e250f95d227"} Oct 09 00:18:32 crc kubenswrapper[4810]: I1009 00:18:32.307786 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:32 crc kubenswrapper[4810]: I1009 00:18:32.341171 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_5555ec6f-be4d-40a7-81b7-d542809f29ca/manage-dockerfile/0.log" Oct 09 00:18:32 crc kubenswrapper[4810]: I1009 00:18:32.361900 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=14.953960414 podStartE2EDuration="31.361870019s" podCreationTimestamp="2025-10-09 00:18:01 +0000 UTC" firstStartedPulling="2025-10-09 00:18:05.979001829 +0000 UTC m=+683.504640570" lastFinishedPulling="2025-10-09 00:18:22.386911474 +0000 UTC m=+699.912550175" observedRunningTime="2025-10-09 00:18:32.357304127 +0000 UTC m=+709.882942838" watchObservedRunningTime="2025-10-09 
00:18:32.361870019 +0000 UTC m=+709.887508750" Oct 09 00:18:34 crc kubenswrapper[4810]: I1009 00:18:34.323720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerStarted","Data":"551fa9d937333f5e729c049f3a12ac7999fea347a035f5d04257bd4254cddcd6"} Oct 09 00:18:34 crc kubenswrapper[4810]: I1009 00:18:34.334067 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" event={"ID":"c2d3ad77-5afa-4a99-bec0-a55f52d1fa51","Type":"ContainerStarted","Data":"20b263e9814627f0a80014c7d749518d6330ac5615f24b7259213d1943788cb4"} Oct 09 00:18:34 crc kubenswrapper[4810]: I1009 00:18:34.367278 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-2-build" podStartSLOduration=13.36726126 podStartE2EDuration="13.36726126s" podCreationTimestamp="2025-10-09 00:18:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:18:34.360284398 +0000 UTC m=+711.885923109" watchObservedRunningTime="2025-10-09 00:18:34.36726126 +0000 UTC m=+711.892899961" Oct 09 00:18:34 crc kubenswrapper[4810]: I1009 00:18:34.383185 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-96d66748b-pngw7" podStartSLOduration=4.781981179 podStartE2EDuration="15.38316423s" podCreationTimestamp="2025-10-09 00:18:19 +0000 UTC" firstStartedPulling="2025-10-09 00:18:22.689201258 +0000 UTC m=+700.214839969" lastFinishedPulling="2025-10-09 00:18:33.290384319 +0000 UTC m=+710.816023020" observedRunningTime="2025-10-09 00:18:34.382727438 +0000 UTC m=+711.908366169" watchObservedRunningTime="2025-10-09 00:18:34.38316423 +0000 UTC m=+711.908802931" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.804753 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-pvdjv"] Oct 09 00:18:36 crc kubenswrapper[4810]: E1009 00:18:36.805095 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d67e20d4-b292-4b9d-9674-8444c8912941" containerName="manage-dockerfile" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.805112 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d67e20d4-b292-4b9d-9674-8444c8912941" containerName="manage-dockerfile" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.805225 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d67e20d4-b292-4b9d-9674-8444c8912941" containerName="manage-dockerfile" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.805846 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.811325 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-njg4q" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.811521 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.811871 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.822721 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-pvdjv"] Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.897566 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7650fe90-e7bd-44ff-9eb1-92299bae19be-bound-sa-token\") pod \"cert-manager-webhook-d969966f-pvdjv\" (UID: \"7650fe90-e7bd-44ff-9eb1-92299bae19be\") " pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.897670 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwrqw\" (UniqueName: \"kubernetes.io/projected/7650fe90-e7bd-44ff-9eb1-92299bae19be-kube-api-access-jwrqw\") pod \"cert-manager-webhook-d969966f-pvdjv\" (UID: \"7650fe90-e7bd-44ff-9eb1-92299bae19be\") " pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.998969 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwrqw\" (UniqueName: \"kubernetes.io/projected/7650fe90-e7bd-44ff-9eb1-92299bae19be-kube-api-access-jwrqw\") pod \"cert-manager-webhook-d969966f-pvdjv\" (UID: \"7650fe90-e7bd-44ff-9eb1-92299bae19be\") " pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:36 crc kubenswrapper[4810]: I1009 00:18:36.999043 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7650fe90-e7bd-44ff-9eb1-92299bae19be-bound-sa-token\") pod \"cert-manager-webhook-d969966f-pvdjv\" (UID: \"7650fe90-e7bd-44ff-9eb1-92299bae19be\") " pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:37 crc kubenswrapper[4810]: I1009 00:18:37.019073 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwrqw\" (UniqueName: \"kubernetes.io/projected/7650fe90-e7bd-44ff-9eb1-92299bae19be-kube-api-access-jwrqw\") pod \"cert-manager-webhook-d969966f-pvdjv\" (UID: \"7650fe90-e7bd-44ff-9eb1-92299bae19be\") " pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:37 crc kubenswrapper[4810]: I1009 00:18:37.022095 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7650fe90-e7bd-44ff-9eb1-92299bae19be-bound-sa-token\") pod \"cert-manager-webhook-d969966f-pvdjv\" (UID: \"7650fe90-e7bd-44ff-9eb1-92299bae19be\") " pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:37 crc kubenswrapper[4810]: I1009 00:18:37.123263 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:37 crc kubenswrapper[4810]: I1009 00:18:37.690445 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-d969966f-pvdjv"] Oct 09 00:18:37 crc kubenswrapper[4810]: W1009 00:18:37.694386 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7650fe90_e7bd_44ff_9eb1_92299bae19be.slice/crio-0fbdd6eeee55bddb4c0f6d3564d0f62ef2e671215bc1d87fd1ebb6d21f225b9f WatchSource:0}: Error finding container 0fbdd6eeee55bddb4c0f6d3564d0f62ef2e671215bc1d87fd1ebb6d21f225b9f: Status 404 returned error can't find the container with id 0fbdd6eeee55bddb4c0f6d3564d0f62ef2e671215bc1d87fd1ebb6d21f225b9f Oct 09 00:18:38 crc kubenswrapper[4810]: I1009 00:18:38.361668 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" event={"ID":"7650fe90-e7bd-44ff-9eb1-92299bae19be","Type":"ContainerStarted","Data":"0fbdd6eeee55bddb4c0f6d3564d0f62ef2e671215bc1d87fd1ebb6d21f225b9f"} Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.138617 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws"] Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.140232 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.143883 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-tkbnq" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.168341 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws"] Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.246045 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg299\" (UniqueName: \"kubernetes.io/projected/30b302fa-b791-4750-91f8-e6ef6898fd08-kube-api-access-kg299\") pod \"cert-manager-cainjector-7d9f95dbf-ts9ws\" (UID: \"30b302fa-b791-4750-91f8-e6ef6898fd08\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.246137 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30b302fa-b791-4750-91f8-e6ef6898fd08-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-ts9ws\" (UID: \"30b302fa-b791-4750-91f8-e6ef6898fd08\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.347116 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg299\" (UniqueName: \"kubernetes.io/projected/30b302fa-b791-4750-91f8-e6ef6898fd08-kube-api-access-kg299\") pod \"cert-manager-cainjector-7d9f95dbf-ts9ws\" (UID: \"30b302fa-b791-4750-91f8-e6ef6898fd08\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.347204 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30b302fa-b791-4750-91f8-e6ef6898fd08-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-ts9ws\" (UID: \"30b302fa-b791-4750-91f8-e6ef6898fd08\") " 
pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.368165 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/30b302fa-b791-4750-91f8-e6ef6898fd08-bound-sa-token\") pod \"cert-manager-cainjector-7d9f95dbf-ts9ws\" (UID: \"30b302fa-b791-4750-91f8-e6ef6898fd08\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.380752 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg299\" (UniqueName: \"kubernetes.io/projected/30b302fa-b791-4750-91f8-e6ef6898fd08-kube-api-access-kg299\") pod \"cert-manager-cainjector-7d9f95dbf-ts9ws\" (UID: \"30b302fa-b791-4750-91f8-e6ef6898fd08\") " pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.480533 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" Oct 09 00:18:40 crc kubenswrapper[4810]: I1009 00:18:40.935704 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws"] Oct 09 00:18:40 crc kubenswrapper[4810]: W1009 00:18:40.943872 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30b302fa_b791_4750_91f8_e6ef6898fd08.slice/crio-b4ff592f71930a038ee564a989bebbf0e0ad1e718a02092b64dd982d83317f86 WatchSource:0}: Error finding container b4ff592f71930a038ee564a989bebbf0e0ad1e718a02092b64dd982d83317f86: Status 404 returned error can't find the container with id b4ff592f71930a038ee564a989bebbf0e0ad1e718a02092b64dd982d83317f86 Oct 09 00:18:41 crc kubenswrapper[4810]: I1009 00:18:41.390556 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" event={"ID":"30b302fa-b791-4750-91f8-e6ef6898fd08","Type":"ContainerStarted","Data":"b4ff592f71930a038ee564a989bebbf0e0ad1e718a02092b64dd982d83317f86"} Oct 09 00:18:41 crc kubenswrapper[4810]: I1009 00:18:41.894274 4810 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="da88a141-8779-4b30-9ea7-3477d4ad9ee5" containerName="elasticsearch" probeResult="failure" output=< Oct 09 00:18:41 crc kubenswrapper[4810]: {"timestamp": "2025-10-09T00:18:41+00:00", "message": "readiness probe failed", "curl_rc": "7"} Oct 09 00:18:41 crc kubenswrapper[4810]: > Oct 09 00:18:47 crc kubenswrapper[4810]: I1009 00:18:47.165377 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Oct 09 00:18:48 crc kubenswrapper[4810]: I1009 00:18:48.428647 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" event={"ID":"7650fe90-e7bd-44ff-9eb1-92299bae19be","Type":"ContainerStarted","Data":"3839dcf58e4faa30d03c394f0d47ed5475204b9860169e0799427df5b8c7a841"} Oct 09 00:18:48 crc kubenswrapper[4810]: I1009 00:18:48.429847 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:48 crc kubenswrapper[4810]: I1009 00:18:48.431015 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" 
event={"ID":"30b302fa-b791-4750-91f8-e6ef6898fd08","Type":"ContainerStarted","Data":"a6f1253f6c52d762e309239f15c988f2e3e432df23b22bb3a084648822c634d4"} Oct 09 00:18:48 crc kubenswrapper[4810]: I1009 00:18:48.453485 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" podStartSLOduration=2.396916251 podStartE2EDuration="12.453458615s" podCreationTimestamp="2025-10-09 00:18:36 +0000 UTC" firstStartedPulling="2025-10-09 00:18:37.697152397 +0000 UTC m=+715.222791098" lastFinishedPulling="2025-10-09 00:18:47.753694761 +0000 UTC m=+725.279333462" observedRunningTime="2025-10-09 00:18:48.45019402 +0000 UTC m=+725.975832731" watchObservedRunningTime="2025-10-09 00:18:48.453458615 +0000 UTC m=+725.979097336" Oct 09 00:18:48 crc kubenswrapper[4810]: I1009 00:18:48.467667 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7d9f95dbf-ts9ws" podStartSLOduration=1.660475412 podStartE2EDuration="8.467640015s" podCreationTimestamp="2025-10-09 00:18:40 +0000 UTC" firstStartedPulling="2025-10-09 00:18:40.946470776 +0000 UTC m=+718.472109477" lastFinishedPulling="2025-10-09 00:18:47.753635379 +0000 UTC m=+725.279274080" observedRunningTime="2025-10-09 00:18:48.465608926 +0000 UTC m=+725.991247627" watchObservedRunningTime="2025-10-09 00:18:48.467640015 +0000 UTC m=+725.993278716" Oct 09 00:18:51 crc kubenswrapper[4810]: I1009 00:18:51.183996 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:18:51 crc kubenswrapper[4810]: I1009 00:18:51.184346 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:18:52 crc kubenswrapper[4810]: I1009 00:18:52.127464 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-d969966f-pvdjv" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.378600 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-ld9xm"] Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.379760 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.386643 4810 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-s4zwv" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.405039 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-ld9xm"] Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.479583 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7f31df7d-685a-472a-b6e5-8a31e96a9897-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-ld9xm\" (UID: \"7f31df7d-685a-472a-b6e5-8a31e96a9897\") " pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.479696 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kxmf\" (UniqueName: \"kubernetes.io/projected/7f31df7d-685a-472a-b6e5-8a31e96a9897-kube-api-access-9kxmf\") pod \"cert-manager-7d4cc89fcb-ld9xm\" (UID: \"7f31df7d-685a-472a-b6e5-8a31e96a9897\") " pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.580637 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7f31df7d-685a-472a-b6e5-8a31e96a9897-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-ld9xm\" (UID: \"7f31df7d-685a-472a-b6e5-8a31e96a9897\") " pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.580704 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kxmf\" (UniqueName: \"kubernetes.io/projected/7f31df7d-685a-472a-b6e5-8a31e96a9897-kube-api-access-9kxmf\") pod \"cert-manager-7d4cc89fcb-ld9xm\" (UID: \"7f31df7d-685a-472a-b6e5-8a31e96a9897\") " pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.608522 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kxmf\" (UniqueName: \"kubernetes.io/projected/7f31df7d-685a-472a-b6e5-8a31e96a9897-kube-api-access-9kxmf\") pod \"cert-manager-7d4cc89fcb-ld9xm\" (UID: \"7f31df7d-685a-472a-b6e5-8a31e96a9897\") " pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.611937 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7f31df7d-685a-472a-b6e5-8a31e96a9897-bound-sa-token\") pod \"cert-manager-7d4cc89fcb-ld9xm\" (UID: \"7f31df7d-685a-472a-b6e5-8a31e96a9897\") " pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:56 crc kubenswrapper[4810]: I1009 00:18:56.699457 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" Oct 09 00:18:57 crc kubenswrapper[4810]: W1009 00:18:57.098390 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f31df7d_685a_472a_b6e5_8a31e96a9897.slice/crio-57edfb642e6400728f7f9e521c9323d3c07b805ab9757ce8df1445d39bd1368a WatchSource:0}: Error finding container 57edfb642e6400728f7f9e521c9323d3c07b805ab9757ce8df1445d39bd1368a: Status 404 returned error can't find the container with id 57edfb642e6400728f7f9e521c9323d3c07b805ab9757ce8df1445d39bd1368a Oct 09 00:18:57 crc kubenswrapper[4810]: I1009 00:18:57.101963 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-7d4cc89fcb-ld9xm"] Oct 09 00:18:57 crc kubenswrapper[4810]: I1009 00:18:57.482516 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" event={"ID":"7f31df7d-685a-472a-b6e5-8a31e96a9897","Type":"ContainerStarted","Data":"57edfb642e6400728f7f9e521c9323d3c07b805ab9757ce8df1445d39bd1368a"} Oct 09 00:18:58 crc kubenswrapper[4810]: I1009 00:18:58.494632 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" event={"ID":"7f31df7d-685a-472a-b6e5-8a31e96a9897","Type":"ContainerStarted","Data":"de2cbe60a81732ec316629617487363eb0668c1923fe29dcca90a2c6cd62bcfc"} Oct 09 00:18:58 crc kubenswrapper[4810]: I1009 00:18:58.524907 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-7d4cc89fcb-ld9xm" podStartSLOduration=2.52488171 podStartE2EDuration="2.52488171s" podCreationTimestamp="2025-10-09 00:18:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:18:58.5189957 +0000 UTC m=+736.044634431" watchObservedRunningTime="2025-10-09 00:18:58.52488171 +0000 UTC m=+736.050520451" Oct 09 00:19:14 crc kubenswrapper[4810]: I1009 00:19:14.792107 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mzlgx"] Oct 09 00:19:14 crc kubenswrapper[4810]: I1009 00:19:14.792887 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" podUID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" containerName="controller-manager" containerID="cri-o://3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b" gracePeriod=30 Oct 09 00:19:14 crc kubenswrapper[4810]: I1009 00:19:14.887012 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn"] Oct 09 00:19:14 crc kubenswrapper[4810]: I1009 00:19:14.887454 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" podUID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" containerName="route-controller-manager" containerID="cri-o://e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace" gracePeriod=30 Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.196908 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.244155 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.247673 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmfqs\" (UniqueName: \"kubernetes.io/projected/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-kube-api-access-jmfqs\") pod \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.266519 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-kube-api-access-jmfqs" (OuterVolumeSpecName: "kube-api-access-jmfqs") pod "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" (UID: "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb"). InnerVolumeSpecName "kube-api-access-jmfqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.348770 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-client-ca\") pod \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349132 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-config\") pod \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349177 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-proxy-ca-bundles\") pod \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349197 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9592f7ec-a684-4b31-97b3-32c3439a8ee0-serving-cert\") pod \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349217 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-config\") pod \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349243 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dvkw\" (UniqueName: \"kubernetes.io/projected/9592f7ec-a684-4b31-97b3-32c3439a8ee0-kube-api-access-2dvkw\") pod \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\" (UID: \"9592f7ec-a684-4b31-97b3-32c3439a8ee0\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349265 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-client-ca\") pod \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349295 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-serving-cert\") pod \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\" (UID: \"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb\") " Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349479 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmfqs\" (UniqueName: \"kubernetes.io/projected/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-kube-api-access-jmfqs\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.349800 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-client-ca" (OuterVolumeSpecName: "client-ca") pod "9592f7ec-a684-4b31-97b3-32c3439a8ee0" (UID: "9592f7ec-a684-4b31-97b3-32c3439a8ee0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.350472 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "9592f7ec-a684-4b31-97b3-32c3439a8ee0" (UID: "9592f7ec-a684-4b31-97b3-32c3439a8ee0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.351027 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-client-ca" (OuterVolumeSpecName: "client-ca") pod "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" (UID: "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.351084 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-config" (OuterVolumeSpecName: "config") pod "9592f7ec-a684-4b31-97b3-32c3439a8ee0" (UID: "9592f7ec-a684-4b31-97b3-32c3439a8ee0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.351200 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-config" (OuterVolumeSpecName: "config") pod "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" (UID: "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.353668 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9592f7ec-a684-4b31-97b3-32c3439a8ee0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9592f7ec-a684-4b31-97b3-32c3439a8ee0" (UID: "9592f7ec-a684-4b31-97b3-32c3439a8ee0"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.354169 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" (UID: "4a71c1e2-9003-4d7c-ace4-ce4d14b826fb"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.358934 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9592f7ec-a684-4b31-97b3-32c3439a8ee0-kube-api-access-2dvkw" (OuterVolumeSpecName: "kube-api-access-2dvkw") pod "9592f7ec-a684-4b31-97b3-32c3439a8ee0" (UID: "9592f7ec-a684-4b31-97b3-32c3439a8ee0"). InnerVolumeSpecName "kube-api-access-2dvkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.450944 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451076 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451115 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451134 4810 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451156 4810 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9592f7ec-a684-4b31-97b3-32c3439a8ee0-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451172 4810 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9592f7ec-a684-4b31-97b3-32c3439a8ee0-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451191 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dvkw\" (UniqueName: \"kubernetes.io/projected/9592f7ec-a684-4b31-97b3-32c3439a8ee0-kube-api-access-2dvkw\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.451209 4810 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.609135 4810 generic.go:334] "Generic (PLEG): container finished" podID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" containerID="3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b" exitCode=0 Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.609375 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" event={"ID":"9592f7ec-a684-4b31-97b3-32c3439a8ee0","Type":"ContainerDied","Data":"3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b"} Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.609537 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.609688 4810 scope.go:117] "RemoveContainer" containerID="3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.609582 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mzlgx" event={"ID":"9592f7ec-a684-4b31-97b3-32c3439a8ee0","Type":"ContainerDied","Data":"2c2ed0b98219b2d2c70ec71a262f9f133cbe2b2a8697572992dcec5e656f9fb5"} Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.611831 4810 generic.go:334] "Generic (PLEG): container finished" podID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" containerID="e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace" exitCode=0 Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.611853 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" event={"ID":"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb","Type":"ContainerDied","Data":"e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace"} Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.611871 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" event={"ID":"4a71c1e2-9003-4d7c-ace4-ce4d14b826fb","Type":"ContainerDied","Data":"8cf01c5c3e6be462a2672aad87504210ae519284d2377f14eb70e975b12dc2f1"} Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.611923 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.627051 4810 scope.go:117] "RemoveContainer" containerID="3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b" Oct 09 00:19:15 crc kubenswrapper[4810]: E1009 00:19:15.627516 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b\": container with ID starting with 3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b not found: ID does not exist" containerID="3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.627717 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b"} err="failed to get container status \"3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b\": rpc error: code = NotFound desc = could not find container \"3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b\": container with ID starting with 3cde72a62ae0b40403435fd93b5c4d1ba1edfc33ea0a37814a3b2f1b9af5a02b not found: ID does not exist" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.627882 4810 scope.go:117] "RemoveContainer" containerID="e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.649629 4810 scope.go:117] "RemoveContainer" containerID="e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace" Oct 09 00:19:15 crc kubenswrapper[4810]: E1009 00:19:15.651966 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace\": container with ID starting with e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace not found: ID does not exist" containerID="e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.652050 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace"} err="failed to get container status \"e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace\": rpc error: code = NotFound desc = could not find container \"e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace\": container with ID starting with e30555f2f2e0c4589c6fe8e485a1c5dae3796d6766c981cfbf987a177c61aace not found: ID does not exist" Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.653899 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn"] Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.663888 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rvkhn"] Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.668528 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mzlgx"] Oct 09 00:19:15 crc kubenswrapper[4810]: I1009 00:19:15.673086 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mzlgx"] Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.004299 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-fd89bb586-7l8sn"] Oct 09 00:19:16 crc kubenswrapper[4810]: E1009 00:19:16.004519 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" containerName="controller-manager" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.004531 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" containerName="controller-manager" Oct 09 00:19:16 crc kubenswrapper[4810]: E1009 00:19:16.004543 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" containerName="route-controller-manager" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.004549 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" containerName="route-controller-manager" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.004647 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" containerName="controller-manager" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.004662 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" containerName="route-controller-manager" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.005090 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.006987 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.007177 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.007456 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.007509 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.007943 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.008176 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.016978 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.019890 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx"] Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.020933 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.023585 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.023585 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.023893 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.023955 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.024996 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.025041 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fd89bb586-7l8sn"] Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.025157 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.036102 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx"] Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060023 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-serving-cert\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060087 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkgp4\" (UniqueName: \"kubernetes.io/projected/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-kube-api-access-wkgp4\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060109 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-client-ca\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060149 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-client-ca\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060167 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-config\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060188 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h56kk\" (UniqueName: \"kubernetes.io/projected/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-kube-api-access-h56kk\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-proxy-ca-bundles\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060223 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-serving-cert\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.060247 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-config\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.160968 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-client-ca\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161014 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-config\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161050 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h56kk\" (UniqueName: \"kubernetes.io/projected/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-kube-api-access-h56kk\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161083 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-serving-cert\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161107 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-proxy-ca-bundles\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161144 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-config\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161190 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-serving-cert\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161230 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkgp4\" (UniqueName: \"kubernetes.io/projected/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-kube-api-access-wkgp4\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: 
\"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.161250 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-client-ca\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.162743 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-client-ca\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.162917 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-config\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.164172 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-proxy-ca-bundles\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.164308 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-config\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.164440 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-serving-cert\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.164530 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-client-ca\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.165454 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-serving-cert\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.175889 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h56kk\" (UniqueName: \"kubernetes.io/projected/d613c99a-8d21-4e75-a450-d3b01d4ba1ff-kube-api-access-h56kk\") pod \"controller-manager-fd89bb586-7l8sn\" (UID: \"d613c99a-8d21-4e75-a450-d3b01d4ba1ff\") " pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.178877 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkgp4\" (UniqueName: \"kubernetes.io/projected/6abb9ee4-a3a7-4fa7-bb52-454682bb810b-kube-api-access-wkgp4\") pod \"route-controller-manager-7486f5bb9d-lk9qx\" (UID: \"6abb9ee4-a3a7-4fa7-bb52-454682bb810b\") " pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.319609 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.335493 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.552519 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx"] Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.624403 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" event={"ID":"6abb9ee4-a3a7-4fa7-bb52-454682bb810b","Type":"ContainerStarted","Data":"63b1578f7eea500409b08bfc00480a088aa8f20db298fb8670a012eb93df01a6"} Oct 09 00:19:16 crc kubenswrapper[4810]: I1009 00:19:16.828906 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-fd89bb586-7l8sn"] Oct 09 00:19:16 crc kubenswrapper[4810]: W1009 00:19:16.842699 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd613c99a_8d21_4e75_a450_d3b01d4ba1ff.slice/crio-87eb547140fed40f18438a9d36ce293d61ecdb02e6e724a0f1dd7858cd0a8c73 WatchSource:0}: Error finding container 87eb547140fed40f18438a9d36ce293d61ecdb02e6e724a0f1dd7858cd0a8c73: Status 404 returned error can't find the container with id 87eb547140fed40f18438a9d36ce293d61ecdb02e6e724a0f1dd7858cd0a8c73 Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.263252 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a71c1e2-9003-4d7c-ace4-ce4d14b826fb" path="/var/lib/kubelet/pods/4a71c1e2-9003-4d7c-ace4-ce4d14b826fb/volumes" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.264437 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9592f7ec-a684-4b31-97b3-32c3439a8ee0" path="/var/lib/kubelet/pods/9592f7ec-a684-4b31-97b3-32c3439a8ee0/volumes" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.629870 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" event={"ID":"d613c99a-8d21-4e75-a450-d3b01d4ba1ff","Type":"ContainerStarted","Data":"cab53459c40468652515a8380683c23e663dd8963f7e138adf07a01957bc4525"} Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.629913 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" 
event={"ID":"d613c99a-8d21-4e75-a450-d3b01d4ba1ff","Type":"ContainerStarted","Data":"87eb547140fed40f18438a9d36ce293d61ecdb02e6e724a0f1dd7858cd0a8c73"} Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.630975 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.632212 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" event={"ID":"6abb9ee4-a3a7-4fa7-bb52-454682bb810b","Type":"ContainerStarted","Data":"e685afdab2295261142e70824151be28f680846920bc0aff7841446c94c924b5"} Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.632696 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.635337 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.640123 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.666218 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-fd89bb586-7l8sn" podStartSLOduration=2.666194365 podStartE2EDuration="2.666194365s" podCreationTimestamp="2025-10-09 00:19:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:19:17.650224043 +0000 UTC m=+755.175862764" watchObservedRunningTime="2025-10-09 00:19:17.666194365 +0000 UTC m=+755.191833086" Oct 09 00:19:17 crc kubenswrapper[4810]: I1009 00:19:17.689386 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7486f5bb9d-lk9qx" podStartSLOduration=2.689362985 podStartE2EDuration="2.689362985s" podCreationTimestamp="2025-10-09 00:19:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:19:17.68884937 +0000 UTC m=+755.214488081" watchObservedRunningTime="2025-10-09 00:19:17.689362985 +0000 UTC m=+755.215001686" Oct 09 00:19:21 crc kubenswrapper[4810]: I1009 00:19:21.184680 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:19:21 crc kubenswrapper[4810]: I1009 00:19:21.185274 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:19:21 crc kubenswrapper[4810]: I1009 00:19:21.185342 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:19:21 crc kubenswrapper[4810]: I1009 
00:19:21.186204 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9655c39bfc737814bdf380a44432c260858cbf7ed04ea57c70188208f88269af"} pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:19:21 crc kubenswrapper[4810]: I1009 00:19:21.186266 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://9655c39bfc737814bdf380a44432c260858cbf7ed04ea57c70188208f88269af" gracePeriod=600 Oct 09 00:19:23 crc kubenswrapper[4810]: I1009 00:19:23.670430 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="9655c39bfc737814bdf380a44432c260858cbf7ed04ea57c70188208f88269af" exitCode=0 Oct 09 00:19:23 crc kubenswrapper[4810]: I1009 00:19:23.670534 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"9655c39bfc737814bdf380a44432c260858cbf7ed04ea57c70188208f88269af"} Oct 09 00:19:23 crc kubenswrapper[4810]: I1009 00:19:23.670977 4810 scope.go:117] "RemoveContainer" containerID="966edc9b311c0a1dc5d942427ebcd50c633f330f904d8cc5143b722007893d59" Oct 09 00:19:24 crc kubenswrapper[4810]: I1009 00:19:24.683748 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"5efb6650f06feac4015d4c59a677ab43187f3e6f6c0ee6ea74bc01256cb113d3"} Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.320296 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b9sq6"] Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.322239 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.332705 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b9sq6"] Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.504898 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-utilities\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.504942 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-catalog-content\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.504970 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chzjk\" (UniqueName: \"kubernetes.io/projected/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-kube-api-access-chzjk\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.605932 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-utilities\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.605971 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-catalog-content\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.605994 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chzjk\" (UniqueName: \"kubernetes.io/projected/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-kube-api-access-chzjk\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.606930 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-utilities\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.606957 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-catalog-content\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.632482 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-chzjk\" (UniqueName: \"kubernetes.io/projected/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-kube-api-access-chzjk\") pod \"community-operators-b9sq6\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:00 crc kubenswrapper[4810]: I1009 00:20:00.641845 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:01 crc kubenswrapper[4810]: I1009 00:20:01.098639 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b9sq6"] Oct 09 00:20:01 crc kubenswrapper[4810]: I1009 00:20:01.979641 4810 generic.go:334] "Generic (PLEG): container finished" podID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerID="f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2" exitCode=0 Oct 09 00:20:01 crc kubenswrapper[4810]: I1009 00:20:01.979715 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerDied","Data":"f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2"} Oct 09 00:20:01 crc kubenswrapper[4810]: I1009 00:20:01.980380 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerStarted","Data":"6ead0effbeb87b6ad58184e580bf1bef1f2fc2f512b6818a559370874f1dfd36"} Oct 09 00:20:02 crc kubenswrapper[4810]: I1009 00:20:02.988106 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerStarted","Data":"b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1"} Oct 09 00:20:03 crc kubenswrapper[4810]: I1009 00:20:03.997779 4810 generic.go:334] "Generic (PLEG): container finished" podID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerID="b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1" exitCode=0 Oct 09 00:20:04 crc kubenswrapper[4810]: I1009 00:20:03.997937 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerDied","Data":"b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1"} Oct 09 00:20:05 crc kubenswrapper[4810]: I1009 00:20:05.009968 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerStarted","Data":"182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2"} Oct 09 00:20:05 crc kubenswrapper[4810]: I1009 00:20:05.032647 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b9sq6" podStartSLOduration=2.515295997 podStartE2EDuration="5.032620911s" podCreationTimestamp="2025-10-09 00:20:00 +0000 UTC" firstStartedPulling="2025-10-09 00:20:01.982928637 +0000 UTC m=+799.508567378" lastFinishedPulling="2025-10-09 00:20:04.500253591 +0000 UTC m=+802.025892292" observedRunningTime="2025-10-09 00:20:05.028313237 +0000 UTC m=+802.553951968" watchObservedRunningTime="2025-10-09 00:20:05.032620911 +0000 UTC m=+802.558259652" Oct 09 00:20:10 crc kubenswrapper[4810]: I1009 00:20:10.642299 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:10 crc kubenswrapper[4810]: I1009 00:20:10.643342 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:10 crc kubenswrapper[4810]: I1009 00:20:10.708596 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:11 crc kubenswrapper[4810]: I1009 00:20:11.070025 4810 generic.go:334] "Generic (PLEG): container finished" podID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerID="551fa9d937333f5e729c049f3a12ac7999fea347a035f5d04257bd4254cddcd6" exitCode=0 Oct 09 00:20:11 crc kubenswrapper[4810]: I1009 00:20:11.070150 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerDied","Data":"551fa9d937333f5e729c049f3a12ac7999fea347a035f5d04257bd4254cddcd6"} Oct 09 00:20:11 crc kubenswrapper[4810]: I1009 00:20:11.130032 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:11 crc kubenswrapper[4810]: I1009 00:20:11.194288 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b9sq6"] Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.407464 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514414 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-pull\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514509 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-system-configs\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514564 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-run\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514584 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-node-pullsecrets\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514647 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildcachedir\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514678 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-root\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514751 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-proxy-ca-bundles\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514848 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-push\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514895 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-blob-cache\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514946 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-ca-bundles\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.514976 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildworkdir\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.515038 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwd5m\" (UniqueName: \"kubernetes.io/projected/5555ec6f-be4d-40a7-81b7-d542809f29ca-kube-api-access-xwd5m\") pod \"5555ec6f-be4d-40a7-81b7-d542809f29ca\" (UID: \"5555ec6f-be4d-40a7-81b7-d542809f29ca\") " Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.515534 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.515551 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.516416 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.517100 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.517336 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.520958 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.522739 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.525380 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5555ec6f-be4d-40a7-81b7-d542809f29ca-kube-api-access-xwd5m" (OuterVolumeSpecName: "kube-api-access-xwd5m") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "kube-api-access-xwd5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.526685 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.551267 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.616938 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.616980 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.616998 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617016 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617035 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617052 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617068 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617085 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617101 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwd5m\" (UniqueName: \"kubernetes.io/projected/5555ec6f-be4d-40a7-81b7-d542809f29ca-kube-api-access-xwd5m\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.617118 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/5555ec6f-be4d-40a7-81b7-d542809f29ca-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.708224 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod 
"5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:12 crc kubenswrapper[4810]: I1009 00:20:12.718269 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:13 crc kubenswrapper[4810]: I1009 00:20:13.086423 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"5555ec6f-be4d-40a7-81b7-d542809f29ca","Type":"ContainerDied","Data":"386f3e98000847d0ef2318c0a094575bcbba6f94c6dff30e623b52d905d0baae"} Oct 09 00:20:13 crc kubenswrapper[4810]: I1009 00:20:13.086458 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Oct 09 00:20:13 crc kubenswrapper[4810]: I1009 00:20:13.086481 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="386f3e98000847d0ef2318c0a094575bcbba6f94c6dff30e623b52d905d0baae" Oct 09 00:20:13 crc kubenswrapper[4810]: I1009 00:20:13.086576 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b9sq6" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="registry-server" containerID="cri-o://182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2" gracePeriod=2 Oct 09 00:20:13 crc kubenswrapper[4810]: I1009 00:20:13.981976 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.096957 4810 generic.go:334] "Generic (PLEG): container finished" podID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerID="182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2" exitCode=0 Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.097032 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b9sq6" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.097038 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerDied","Data":"182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2"} Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.097080 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9sq6" event={"ID":"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19","Type":"ContainerDied","Data":"6ead0effbeb87b6ad58184e580bf1bef1f2fc2f512b6818a559370874f1dfd36"} Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.097098 4810 scope.go:117] "RemoveContainer" containerID="182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.116327 4810 scope.go:117] "RemoveContainer" containerID="b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.137488 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chzjk\" (UniqueName: \"kubernetes.io/projected/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-kube-api-access-chzjk\") pod \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.137562 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-catalog-content\") pod \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.137686 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-utilities\") pod \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\" (UID: \"2d98ff9a-13a4-4ad4-875d-6a5f921fdf19\") " Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.138956 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-utilities" (OuterVolumeSpecName: "utilities") pod "2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" (UID: "2d98ff9a-13a4-4ad4-875d-6a5f921fdf19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.145116 4810 scope.go:117] "RemoveContainer" containerID="f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.149957 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-kube-api-access-chzjk" (OuterVolumeSpecName: "kube-api-access-chzjk") pod "2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" (UID: "2d98ff9a-13a4-4ad4-875d-6a5f921fdf19"). InnerVolumeSpecName "kube-api-access-chzjk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.184334 4810 scope.go:117] "RemoveContainer" containerID="182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2" Oct 09 00:20:14 crc kubenswrapper[4810]: E1009 00:20:14.184876 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2\": container with ID starting with 182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2 not found: ID does not exist" containerID="182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.184920 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2"} err="failed to get container status \"182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2\": rpc error: code = NotFound desc = could not find container \"182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2\": container with ID starting with 182b5af50c5513d327d5573bdd980ccf80c1b48a2944975e68bad009d3b91ef2 not found: ID does not exist" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.184952 4810 scope.go:117] "RemoveContainer" containerID="b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1" Oct 09 00:20:14 crc kubenswrapper[4810]: E1009 00:20:14.185280 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1\": container with ID starting with b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1 not found: ID does not exist" containerID="b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.185315 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1"} err="failed to get container status \"b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1\": rpc error: code = NotFound desc = could not find container \"b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1\": container with ID starting with b9485890c5a44f50daa803de40eedcccda2da29e7a26f10ca3191961d1726cf1 not found: ID does not exist" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.185335 4810 scope.go:117] "RemoveContainer" containerID="f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2" Oct 09 00:20:14 crc kubenswrapper[4810]: E1009 00:20:14.185624 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2\": container with ID starting with f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2 not found: ID does not exist" containerID="f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.185655 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2"} err="failed to get container status \"f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2\": rpc error: code = NotFound desc = could not 
find container \"f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2\": container with ID starting with f957a3a059193b24c8059db563e863820e8b448ef6fb047fb31fd417536406d2 not found: ID does not exist" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.205355 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" (UID: "2d98ff9a-13a4-4ad4-875d-6a5f921fdf19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.239219 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chzjk\" (UniqueName: \"kubernetes.io/projected/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-kube-api-access-chzjk\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.239249 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.239354 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.447125 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b9sq6"] Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.451646 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b9sq6"] Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.954321 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "5555ec6f-be4d-40a7-81b7-d542809f29ca" (UID: "5555ec6f-be4d-40a7-81b7-d542809f29ca"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:14 crc kubenswrapper[4810]: I1009 00:20:14.958063 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5555ec6f-be4d-40a7-81b7-d542809f29ca-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:15 crc kubenswrapper[4810]: I1009 00:20:15.268867 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" path="/var/lib/kubelet/pods/2d98ff9a-13a4-4ad4-875d-6a5f921fdf19/volumes" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.869947 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 09 00:20:16 crc kubenswrapper[4810]: E1009 00:20:16.870240 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="docker-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870259 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="docker-build" Oct 09 00:20:16 crc kubenswrapper[4810]: E1009 00:20:16.870276 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="extract-utilities" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870288 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="extract-utilities" Oct 09 00:20:16 crc kubenswrapper[4810]: E1009 00:20:16.870305 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="manage-dockerfile" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870317 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="manage-dockerfile" Oct 09 00:20:16 crc kubenswrapper[4810]: E1009 00:20:16.870344 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="registry-server" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870354 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="registry-server" Oct 09 00:20:16 crc kubenswrapper[4810]: E1009 00:20:16.870368 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="git-clone" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870376 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="git-clone" Oct 09 00:20:16 crc kubenswrapper[4810]: E1009 00:20:16.870391 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="extract-content" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870398 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="extract-content" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870519 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d98ff9a-13a4-4ad4-875d-6a5f921fdf19" containerName="registry-server" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.870537 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="5555ec6f-be4d-40a7-81b7-d542809f29ca" containerName="docker-build" Oct 09 00:20:16 
crc kubenswrapper[4810]: I1009 00:20:16.871370 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.873998 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-global-ca" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.874499 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.874682 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-ca" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.875628 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-sys-config" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.884627 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.983993 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984043 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984069 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984090 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984111 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984272 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk247\" (UniqueName: \"kubernetes.io/projected/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-kube-api-access-qk247\") pod 
\"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984565 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984691 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984787 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.984905 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.985018 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:16 crc kubenswrapper[4810]: I1009 00:20:16.985058 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086162 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086233 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " 
pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086269 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086303 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086335 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086375 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk247\" (UniqueName: \"kubernetes.io/projected/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-kube-api-access-qk247\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086466 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086691 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.086811 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087212 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087275 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087320 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087379 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087376 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087410 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087313 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087434 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087755 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.088198 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.088907 
4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.087652 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.092382 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.097320 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.103147 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk247\" (UniqueName: \"kubernetes.io/projected/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-kube-api-access-qk247\") pod \"smart-gateway-operator-1-build\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.184374 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:17 crc kubenswrapper[4810]: I1009 00:20:17.599787 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 09 00:20:18 crc kubenswrapper[4810]: I1009 00:20:18.128032 4810 generic.go:334] "Generic (PLEG): container finished" podID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerID="979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144" exitCode=0 Oct 09 00:20:18 crc kubenswrapper[4810]: I1009 00:20:18.128146 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"695de44f-8f4b-4cf9-8eb8-5a6958865ce0","Type":"ContainerDied","Data":"979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144"} Oct 09 00:20:18 crc kubenswrapper[4810]: I1009 00:20:18.129712 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"695de44f-8f4b-4cf9-8eb8-5a6958865ce0","Type":"ContainerStarted","Data":"b8092726cacfacbec508ae8d49ef74f741a5bc88d8ee938dac6f4816a30e5d61"} Oct 09 00:20:19 crc kubenswrapper[4810]: I1009 00:20:19.139429 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"695de44f-8f4b-4cf9-8eb8-5a6958865ce0","Type":"ContainerStarted","Data":"5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d"} Oct 09 00:20:19 crc kubenswrapper[4810]: I1009 00:20:19.171216 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-1-build" podStartSLOduration=3.171194954 podStartE2EDuration="3.171194954s" podCreationTimestamp="2025-10-09 00:20:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:20:19.168542898 +0000 UTC m=+816.694181609" watchObservedRunningTime="2025-10-09 00:20:19.171194954 +0000 UTC m=+816.696833665" Oct 09 00:20:24 crc kubenswrapper[4810]: I1009 00:20:24.671685 4810 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 00:20:27 crc kubenswrapper[4810]: I1009 00:20:27.661119 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 09 00:20:27 crc kubenswrapper[4810]: I1009 00:20:27.661783 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/smart-gateway-operator-1-build" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerName="docker-build" containerID="cri-o://5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d" gracePeriod=30 Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.630794 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_695de44f-8f4b-4cf9-8eb8-5a6958865ce0/docker-build/0.log" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.631371 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.748907 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qk247\" (UniqueName: \"kubernetes.io/projected/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-kube-api-access-qk247\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749004 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildworkdir\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749026 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildcachedir\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749103 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-blob-cache\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749131 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-push\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749168 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-node-pullsecrets\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749211 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-system-configs\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749240 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-proxy-ca-bundles\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749239 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749269 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-root\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749294 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-ca-bundles\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749335 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-run\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749359 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-pull\") pod \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\" (UID: \"695de44f-8f4b-4cf9-8eb8-5a6958865ce0\") " Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749642 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.749937 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.750070 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.750468 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.752643 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). 
InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.753150 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.755225 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.755349 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.755407 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-kube-api-access-qk247" (OuterVolumeSpecName: "kube-api-access-qk247") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "kube-api-access-qk247". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.755679 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851003 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851049 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851061 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851073 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851085 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851095 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qk247\" (UniqueName: \"kubernetes.io/projected/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-kube-api-access-qk247\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851106 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851117 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.851128 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.927570 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:28 crc kubenswrapper[4810]: I1009 00:20:28.952420 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.211015 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_695de44f-8f4b-4cf9-8eb8-5a6958865ce0/docker-build/0.log" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.211857 4810 generic.go:334] "Generic (PLEG): container finished" podID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerID="5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d" exitCode=1 Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.211926 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"695de44f-8f4b-4cf9-8eb8-5a6958865ce0","Type":"ContainerDied","Data":"5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d"} Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.211950 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.211975 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"695de44f-8f4b-4cf9-8eb8-5a6958865ce0","Type":"ContainerDied","Data":"b8092726cacfacbec508ae8d49ef74f741a5bc88d8ee938dac6f4816a30e5d61"} Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.212012 4810 scope.go:117] "RemoveContainer" containerID="5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.264611 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "695de44f-8f4b-4cf9-8eb8-5a6958865ce0" (UID: "695de44f-8f4b-4cf9-8eb8-5a6958865ce0"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.298741 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Oct 09 00:20:29 crc kubenswrapper[4810]: E1009 00:20:29.299077 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerName="manage-dockerfile" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.299093 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerName="manage-dockerfile" Oct 09 00:20:29 crc kubenswrapper[4810]: E1009 00:20:29.299112 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerName="docker-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.299120 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerName="docker-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.299264 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" containerName="docker-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.304933 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.313296 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-sys-config" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.323579 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-global-ca" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.323535 4810 scope.go:117] "RemoveContainer" containerID="979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.323628 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-ca" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.341027 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.359778 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/695de44f-8f4b-4cf9-8eb8-5a6958865ce0-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.367460 4810 scope.go:117] "RemoveContainer" containerID="5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d" Oct 09 00:20:29 crc kubenswrapper[4810]: E1009 00:20:29.368241 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d\": container with ID starting with 5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d not found: ID does not exist" containerID="5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.368365 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d"} err="failed to get container status 
\"5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d\": rpc error: code = NotFound desc = could not find container \"5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d\": container with ID starting with 5254da3c3b6ea422ae3272ce6dd967da68f593fad70281fe407f9dcf117efd2d not found: ID does not exist" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.368470 4810 scope.go:117] "RemoveContainer" containerID="979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144" Oct 09 00:20:29 crc kubenswrapper[4810]: E1009 00:20:29.368935 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144\": container with ID starting with 979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144 not found: ID does not exist" containerID="979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.369051 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144"} err="failed to get container status \"979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144\": rpc error: code = NotFound desc = could not find container \"979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144\": container with ID starting with 979756fc5118297f7dbcd7a5819bf8c69c2712cdd2bed9aa98cb20b73cf04144 not found: ID does not exist" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.461659 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462086 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462204 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkvvt\" (UniqueName: \"kubernetes.io/projected/20c3a267-4124-401c-9be2-69065acf4767-kube-api-access-pkvvt\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462308 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462426 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: 
\"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462538 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462649 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462797 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.462966 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.463281 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.463400 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.463556 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.545598 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.556787 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["service-telemetry/smart-gateway-operator-1-build"] Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.564957 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.565427 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.565653 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.565861 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566097 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566320 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566331 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566408 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566434 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-buildworkdir\") 
pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566138 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566588 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566658 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566885 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567018 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkvvt\" (UniqueName: \"kubernetes.io/projected/20c3a267-4124-401c-9be2-69065acf4767-kube-api-access-pkvvt\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567097 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.566913 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567185 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567211 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567628 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567947 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.567955 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.578307 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.578314 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.629990 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkvvt\" (UniqueName: \"kubernetes.io/projected/20c3a267-4124-401c-9be2-69065acf4767-kube-api-access-pkvvt\") pod \"smart-gateway-operator-2-build\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:29 crc kubenswrapper[4810]: I1009 00:20:29.638280 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:20:30 crc kubenswrapper[4810]: I1009 00:20:30.063501 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Oct 09 00:20:30 crc kubenswrapper[4810]: I1009 00:20:30.221558 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerStarted","Data":"aaa7855efd3ef91596b4ead9483f467884f4ccaa11c872188ee8ba28f65188ce"} Oct 09 00:20:31 crc kubenswrapper[4810]: I1009 00:20:31.234553 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerStarted","Data":"69ca03601fd87516bccb94a7dc21061dd8c85732ed6d1025fe7fcb084dac875a"} Oct 09 00:20:31 crc kubenswrapper[4810]: I1009 00:20:31.264902 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="695de44f-8f4b-4cf9-8eb8-5a6958865ce0" path="/var/lib/kubelet/pods/695de44f-8f4b-4cf9-8eb8-5a6958865ce0/volumes" Oct 09 00:20:32 crc kubenswrapper[4810]: I1009 00:20:32.245483 4810 generic.go:334] "Generic (PLEG): container finished" podID="20c3a267-4124-401c-9be2-69065acf4767" containerID="69ca03601fd87516bccb94a7dc21061dd8c85732ed6d1025fe7fcb084dac875a" exitCode=0 Oct 09 00:20:32 crc kubenswrapper[4810]: I1009 00:20:32.245529 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerDied","Data":"69ca03601fd87516bccb94a7dc21061dd8c85732ed6d1025fe7fcb084dac875a"} Oct 09 00:20:33 crc kubenswrapper[4810]: I1009 00:20:33.257965 4810 generic.go:334] "Generic (PLEG): container finished" podID="20c3a267-4124-401c-9be2-69065acf4767" containerID="456c1a5db3450f4091a5e831d044bd26a4d69a0039d05a0bcce5215ea5636d55" exitCode=0 Oct 09 00:20:33 crc kubenswrapper[4810]: I1009 00:20:33.262681 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerDied","Data":"456c1a5db3450f4091a5e831d044bd26a4d69a0039d05a0bcce5215ea5636d55"} Oct 09 00:20:33 crc kubenswrapper[4810]: I1009 00:20:33.319512 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_20c3a267-4124-401c-9be2-69065acf4767/manage-dockerfile/0.log" Oct 09 00:20:34 crc kubenswrapper[4810]: I1009 00:20:34.268481 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerStarted","Data":"43e1573849a5280b1b11cad5074c355bdaec81daa992111e0703c34ed9e91367"} Oct 09 00:20:34 crc kubenswrapper[4810]: I1009 00:20:34.304793 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-2-build" podStartSLOduration=5.304766221 podStartE2EDuration="5.304766221s" podCreationTimestamp="2025-10-09 00:20:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:20:34.297765448 +0000 UTC m=+831.823404199" watchObservedRunningTime="2025-10-09 00:20:34.304766221 +0000 UTC m=+831.830404982" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.428978 4810 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-marketplace/redhat-operators-h2tmp"] Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.430575 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.456727 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h2tmp"] Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.512546 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-utilities\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.512610 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsfjm\" (UniqueName: \"kubernetes.io/projected/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-kube-api-access-jsfjm\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.512631 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-catalog-content\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.614029 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-utilities\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.614092 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsfjm\" (UniqueName: \"kubernetes.io/projected/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-kube-api-access-jsfjm\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.614116 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-catalog-content\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.614498 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-catalog-content\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.614633 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-utilities\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " 
pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.634643 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsfjm\" (UniqueName: \"kubernetes.io/projected/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-kube-api-access-jsfjm\") pod \"redhat-operators-h2tmp\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:39 crc kubenswrapper[4810]: I1009 00:21:39.754757 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:40 crc kubenswrapper[4810]: I1009 00:21:40.002186 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h2tmp"] Oct 09 00:21:40 crc kubenswrapper[4810]: W1009 00:21:40.021206 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6291bcc_cef7_4c39_afd1_81f7f9b3f1ba.slice/crio-13e29c5d67b143fbac2e07760c0be25e6d4cdf1c6f770403f285797af9d096c2 WatchSource:0}: Error finding container 13e29c5d67b143fbac2e07760c0be25e6d4cdf1c6f770403f285797af9d096c2: Status 404 returned error can't find the container with id 13e29c5d67b143fbac2e07760c0be25e6d4cdf1c6f770403f285797af9d096c2 Oct 09 00:21:40 crc kubenswrapper[4810]: I1009 00:21:40.712348 4810 generic.go:334] "Generic (PLEG): container finished" podID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerID="10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05" exitCode=0 Oct 09 00:21:40 crc kubenswrapper[4810]: I1009 00:21:40.712409 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerDied","Data":"10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05"} Oct 09 00:21:40 crc kubenswrapper[4810]: I1009 00:21:40.712480 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerStarted","Data":"13e29c5d67b143fbac2e07760c0be25e6d4cdf1c6f770403f285797af9d096c2"} Oct 09 00:21:41 crc kubenswrapper[4810]: I1009 00:21:41.718309 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerStarted","Data":"1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13"} Oct 09 00:21:42 crc kubenswrapper[4810]: I1009 00:21:42.726398 4810 generic.go:334] "Generic (PLEG): container finished" podID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerID="1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13" exitCode=0 Oct 09 00:21:42 crc kubenswrapper[4810]: I1009 00:21:42.726500 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerDied","Data":"1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13"} Oct 09 00:21:43 crc kubenswrapper[4810]: I1009 00:21:43.734712 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerStarted","Data":"55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6"} Oct 09 00:21:43 crc kubenswrapper[4810]: I1009 00:21:43.762608 4810 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h2tmp" podStartSLOduration=2.2718226870000002 podStartE2EDuration="4.762590151s" podCreationTimestamp="2025-10-09 00:21:39 +0000 UTC" firstStartedPulling="2025-10-09 00:21:40.714693866 +0000 UTC m=+898.240332557" lastFinishedPulling="2025-10-09 00:21:43.20546132 +0000 UTC m=+900.731100021" observedRunningTime="2025-10-09 00:21:43.761592772 +0000 UTC m=+901.287231493" watchObservedRunningTime="2025-10-09 00:21:43.762590151 +0000 UTC m=+901.288228862" Oct 09 00:21:49 crc kubenswrapper[4810]: I1009 00:21:49.755800 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:49 crc kubenswrapper[4810]: I1009 00:21:49.756327 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:49 crc kubenswrapper[4810]: I1009 00:21:49.797706 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:50 crc kubenswrapper[4810]: I1009 00:21:50.785808 4810 generic.go:334] "Generic (PLEG): container finished" podID="20c3a267-4124-401c-9be2-69065acf4767" containerID="43e1573849a5280b1b11cad5074c355bdaec81daa992111e0703c34ed9e91367" exitCode=0 Oct 09 00:21:50 crc kubenswrapper[4810]: I1009 00:21:50.785895 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerDied","Data":"43e1573849a5280b1b11cad5074c355bdaec81daa992111e0703c34ed9e91367"} Oct 09 00:21:50 crc kubenswrapper[4810]: I1009 00:21:50.855905 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:50 crc kubenswrapper[4810]: I1009 00:21:50.916411 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h2tmp"] Oct 09 00:21:51 crc kubenswrapper[4810]: I1009 00:21:51.184904 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:21:51 crc kubenswrapper[4810]: I1009 00:21:51.184972 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.051237 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.190705 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkvvt\" (UniqueName: \"kubernetes.io/projected/20c3a267-4124-401c-9be2-69065acf4767-kube-api-access-pkvvt\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.190802 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-run\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.190894 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-root\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.190967 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-build-blob-cache\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.191048 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-buildworkdir\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.191226 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-push\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.192389 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.200230 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.200405 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-proxy-ca-bundles\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.201477 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.201701 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-node-pullsecrets\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.201782 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.201944 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.202003 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-buildcachedir\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.202071 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-ca-bundles\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.202737 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.202853 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203015 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20c3a267-4124-401c-9be2-69065acf4767-kube-api-access-pkvvt" (OuterVolumeSpecName: "kube-api-access-pkvvt") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "kube-api-access-pkvvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203159 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203230 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-system-configs\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203317 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-pull\") pod \"20c3a267-4124-401c-9be2-69065acf4767\" (UID: \"20c3a267-4124-401c-9be2-69065acf4767\") " Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203858 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkvvt\" (UniqueName: \"kubernetes.io/projected/20c3a267-4124-401c-9be2-69065acf4767-kube-api-access-pkvvt\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203893 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203911 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203932 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203951 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-proxy-ca-bundles\") on node 
\"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203969 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.203986 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/20c3a267-4124-401c-9be2-69065acf4767-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.204003 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.204019 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/20c3a267-4124-401c-9be2-69065acf4767-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.207758 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.305230 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/20c3a267-4124-401c-9be2-69065acf4767-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.380917 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.406469 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.826855 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"20c3a267-4124-401c-9be2-69065acf4767","Type":"ContainerDied","Data":"aaa7855efd3ef91596b4ead9483f467884f4ccaa11c872188ee8ba28f65188ce"} Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.827121 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaa7855efd3ef91596b4ead9483f467884f4ccaa11c872188ee8ba28f65188ce" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.826912 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Oct 09 00:21:52 crc kubenswrapper[4810]: I1009 00:21:52.826881 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h2tmp" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="registry-server" containerID="cri-o://55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6" gracePeriod=2 Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.348569 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.476514 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-utilities\") pod \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.476666 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-catalog-content\") pod \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.476778 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsfjm\" (UniqueName: \"kubernetes.io/projected/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-kube-api-access-jsfjm\") pod \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\" (UID: \"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba\") " Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.480967 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-utilities" (OuterVolumeSpecName: "utilities") pod "d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" (UID: "d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.481775 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-kube-api-access-jsfjm" (OuterVolumeSpecName: "kube-api-access-jsfjm") pod "d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" (UID: "d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba"). InnerVolumeSpecName "kube-api-access-jsfjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.576026 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" (UID: "d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.578052 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsfjm\" (UniqueName: \"kubernetes.io/projected/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-kube-api-access-jsfjm\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.578090 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.578100 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.840482 4810 generic.go:334] "Generic (PLEG): container finished" podID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerID="55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6" exitCode=0 Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.840553 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerDied","Data":"55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6"} Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.840592 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h2tmp" event={"ID":"d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba","Type":"ContainerDied","Data":"13e29c5d67b143fbac2e07760c0be25e6d4cdf1c6f770403f285797af9d096c2"} Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.840622 4810 scope.go:117] "RemoveContainer" containerID="55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.840814 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h2tmp" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.861648 4810 scope.go:117] "RemoveContainer" containerID="1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.887654 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "20c3a267-4124-401c-9be2-69065acf4767" (UID: "20c3a267-4124-401c-9be2-69065acf4767"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.891889 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h2tmp"] Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.894138 4810 scope.go:117] "RemoveContainer" containerID="10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.901632 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h2tmp"] Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.913298 4810 scope.go:117] "RemoveContainer" containerID="55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6" Oct 09 00:21:54 crc kubenswrapper[4810]: E1009 00:21:54.913758 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6\": container with ID starting with 55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6 not found: ID does not exist" containerID="55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.913787 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6"} err="failed to get container status \"55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6\": rpc error: code = NotFound desc = could not find container \"55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6\": container with ID starting with 55fc80cadfc1a6febb16a0e5b2864a207979a6c62dda72d5bf4ca415fcfaa6d6 not found: ID does not exist" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.913809 4810 scope.go:117] "RemoveContainer" containerID="1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13" Oct 09 00:21:54 crc kubenswrapper[4810]: E1009 00:21:54.914410 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13\": container with ID starting with 1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13 not found: ID does not exist" containerID="1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.914429 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13"} err="failed to get container status \"1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13\": rpc error: code = NotFound desc = could not find container \"1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13\": container with ID starting with 1b91919675f83ebeb71d1b8c5383e19b230cd36268a02a8b6211b65376e93b13 not found: ID does not exist" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.914445 4810 scope.go:117] "RemoveContainer" containerID="10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05" Oct 09 00:21:54 crc kubenswrapper[4810]: E1009 00:21:54.914918 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05\": container with ID starting with 
10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05 not found: ID does not exist" containerID="10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.914935 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05"} err="failed to get container status \"10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05\": rpc error: code = NotFound desc = could not find container \"10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05\": container with ID starting with 10d4f7d1e2e7ad884d23949a3f67af85297af1290062ac3e280fc3e9dc177d05 not found: ID does not exist" Oct 09 00:21:54 crc kubenswrapper[4810]: I1009 00:21:54.985323 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/20c3a267-4124-401c-9be2-69065acf4767-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:21:55 crc kubenswrapper[4810]: I1009 00:21:55.266421 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" path="/var/lib/kubelet/pods/d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba/volumes" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.633624 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 09 00:21:57 crc kubenswrapper[4810]: E1009 00:21:57.634216 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="git-clone" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634233 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="git-clone" Oct 09 00:21:57 crc kubenswrapper[4810]: E1009 00:21:57.634252 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="registry-server" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634259 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="registry-server" Oct 09 00:21:57 crc kubenswrapper[4810]: E1009 00:21:57.634268 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="extract-content" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634276 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="extract-content" Oct 09 00:21:57 crc kubenswrapper[4810]: E1009 00:21:57.634284 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="docker-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634291 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="docker-build" Oct 09 00:21:57 crc kubenswrapper[4810]: E1009 00:21:57.634301 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="extract-utilities" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634309 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="extract-utilities" Oct 09 00:21:57 crc kubenswrapper[4810]: E1009 00:21:57.634324 4810 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="manage-dockerfile" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634332 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="manage-dockerfile" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634480 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="20c3a267-4124-401c-9be2-69065acf4767" containerName="docker-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.634503 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6291bcc-cef7-4c39-afd1-81f7f9b3f1ba" containerName="registry-server" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.635217 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.641401 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-ca" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.641410 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-sys-config" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.641405 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-global-ca" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.641658 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.684874 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824497 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zc7h\" (UniqueName: \"kubernetes.io/projected/0ab8a8dd-f609-4c12-96b7-9017ee61a058-kube-api-access-8zc7h\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824557 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-push\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824593 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824618 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824643 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildworkdir\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824674 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildcachedir\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824714 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-root\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824743 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824764 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-system-configs\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824789 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-pull\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824846 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-run\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.824887 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.926184 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zc7h\" (UniqueName: \"kubernetes.io/projected/0ab8a8dd-f609-4c12-96b7-9017ee61a058-kube-api-access-8zc7h\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.926595 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-push\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927047 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927245 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927421 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildworkdir\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927602 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildcachedir\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927770 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-root\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927995 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928157 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-system-configs\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928308 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-pull\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928451 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-root\") pod 
\"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928066 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildworkdir\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927628 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928181 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.927675 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildcachedir\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928860 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-run\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.929052 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.929312 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-system-configs\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.928947 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-run\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.930321 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.931167 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.934049 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-pull\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.934630 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-push\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:57 crc kubenswrapper[4810]: I1009 00:21:57.975629 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zc7h\" (UniqueName: \"kubernetes.io/projected/0ab8a8dd-f609-4c12-96b7-9017ee61a058-kube-api-access-8zc7h\") pod \"sg-core-1-build\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " pod="service-telemetry/sg-core-1-build" Oct 09 00:21:58 crc kubenswrapper[4810]: I1009 00:21:58.251927 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 09 00:21:58 crc kubenswrapper[4810]: I1009 00:21:58.704199 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 09 00:21:58 crc kubenswrapper[4810]: I1009 00:21:58.884055 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"0ab8a8dd-f609-4c12-96b7-9017ee61a058","Type":"ContainerStarted","Data":"a275d829bed9f33884219c48e1b3c326fe7b01fb2d00414ebbae118e27658c50"} Oct 09 00:21:59 crc kubenswrapper[4810]: I1009 00:21:59.896180 4810 generic.go:334] "Generic (PLEG): container finished" podID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerID="db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f" exitCode=0 Oct 09 00:21:59 crc kubenswrapper[4810]: I1009 00:21:59.896309 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"0ab8a8dd-f609-4c12-96b7-9017ee61a058","Type":"ContainerDied","Data":"db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f"} Oct 09 00:22:00 crc kubenswrapper[4810]: I1009 00:22:00.908500 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"0ab8a8dd-f609-4c12-96b7-9017ee61a058","Type":"ContainerStarted","Data":"5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a"} Oct 09 00:22:00 crc kubenswrapper[4810]: I1009 00:22:00.949108 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-1-build" podStartSLOduration=3.949075392 podStartE2EDuration="3.949075392s" podCreationTimestamp="2025-10-09 00:21:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:22:00.945801818 +0000 UTC m=+918.471440579" watchObservedRunningTime="2025-10-09 00:22:00.949075392 +0000 UTC m=+918.474714153" Oct 09 00:22:07 
crc kubenswrapper[4810]: I1009 00:22:07.886272 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 09 00:22:07 crc kubenswrapper[4810]: I1009 00:22:07.887033 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/sg-core-1-build" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerName="docker-build" containerID="cri-o://5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a" gracePeriod=30 Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.291191 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_0ab8a8dd-f609-4c12-96b7-9017ee61a058/docker-build/0.log" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.291840 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.476898 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-node-pullsecrets\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.476977 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-run\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477022 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-ca-bundles\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477055 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477092 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-system-configs\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477165 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-root\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477224 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-blob-cache\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477285 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-pull\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477323 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-proxy-ca-bundles\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.477365 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zc7h\" (UniqueName: \"kubernetes.io/projected/0ab8a8dd-f609-4c12-96b7-9017ee61a058-kube-api-access-8zc7h\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.478479 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.478601 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "build-system-configs". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.478718 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildcachedir\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.478861 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-push\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.478915 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildworkdir\") pod \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\" (UID: \"0ab8a8dd-f609-4c12-96b7-9017ee61a058\") " Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.479250 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.480100 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481298 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481547 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481576 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0ab8a8dd-f609-4c12-96b7-9017ee61a058-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481595 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481625 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481643 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.481661 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.482163 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.486954 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.492957 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.502647 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ab8a8dd-f609-4c12-96b7-9017ee61a058-kube-api-access-8zc7h" (OuterVolumeSpecName: "kube-api-access-8zc7h") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "kube-api-access-8zc7h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.583267 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.583769 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zc7h\" (UniqueName: \"kubernetes.io/projected/0ab8a8dd-f609-4c12-96b7-9017ee61a058-kube-api-access-8zc7h\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.584169 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/0ab8a8dd-f609-4c12-96b7-9017ee61a058-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.584467 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.584472 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.632423 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "0ab8a8dd-f609-4c12-96b7-9017ee61a058" (UID: "0ab8a8dd-f609-4c12-96b7-9017ee61a058"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.685214 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.685242 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0ab8a8dd-f609-4c12-96b7-9017ee61a058-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.963524 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_0ab8a8dd-f609-4c12-96b7-9017ee61a058/docker-build/0.log" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.964353 4810 generic.go:334] "Generic (PLEG): container finished" podID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerID="5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a" exitCode=1 Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.964414 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"0ab8a8dd-f609-4c12-96b7-9017ee61a058","Type":"ContainerDied","Data":"5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a"} Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.964461 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"0ab8a8dd-f609-4c12-96b7-9017ee61a058","Type":"ContainerDied","Data":"a275d829bed9f33884219c48e1b3c326fe7b01fb2d00414ebbae118e27658c50"} Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.964462 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-1-build" Oct 09 00:22:08 crc kubenswrapper[4810]: I1009 00:22:08.964560 4810 scope.go:117] "RemoveContainer" containerID="5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.025998 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.036683 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-core-1-build"] Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.046347 4810 scope.go:117] "RemoveContainer" containerID="db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.077051 4810 scope.go:117] "RemoveContainer" containerID="5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a" Oct 09 00:22:09 crc kubenswrapper[4810]: E1009 00:22:09.077585 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a\": container with ID starting with 5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a not found: ID does not exist" containerID="5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.077622 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a"} err="failed to get container status \"5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a\": rpc error: code = NotFound desc = could not find container \"5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a\": container with ID starting with 5a1b0c094d6bc5383e683c52bf631296da439658418433404d11f6bdade7e70a not found: ID does not exist" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.077643 4810 scope.go:117] "RemoveContainer" containerID="db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f" Oct 09 00:22:09 crc kubenswrapper[4810]: E1009 00:22:09.078203 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f\": container with ID starting with db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f not found: ID does not exist" containerID="db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.078272 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f"} err="failed to get container status \"db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f\": rpc error: code = NotFound desc = could not find container \"db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f\": container with ID starting with db17798e9de3b3355dbe7beda9506a15cb713708e56128095e5fae28ff570d5f not found: ID does not exist" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.270069 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" path="/var/lib/kubelet/pods/0ab8a8dd-f609-4c12-96b7-9017ee61a058/volumes" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.508208 4810 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-2-build"] Oct 09 00:22:09 crc kubenswrapper[4810]: E1009 00:22:09.508643 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerName="manage-dockerfile" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.508656 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerName="manage-dockerfile" Oct 09 00:22:09 crc kubenswrapper[4810]: E1009 00:22:09.508667 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerName="docker-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.508673 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerName="docker-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.508774 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ab8a8dd-f609-4c12-96b7-9017ee61a058" containerName="docker-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.509511 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.511204 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-global-ca" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.511280 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-sys-config" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.512205 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.512677 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-ca" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.528583 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596573 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596619 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-push\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596651 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596722 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-buildworkdir\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596786 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-system-configs\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596811 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-run\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596841 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-root\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596861 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-buildcachedir\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596879 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zhjb\" (UniqueName: \"kubernetes.io/projected/40d20127-9265-4de3-a7ae-a5493406441c-kube-api-access-5zhjb\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596922 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-pull\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.596976 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.597004 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698560 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698606 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698666 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698694 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-push\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698722 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698743 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-buildworkdir\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698750 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698781 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-system-configs\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698800 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-run\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698839 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-root\") pod 
\"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698875 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-buildcachedir\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698896 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zhjb\" (UniqueName: \"kubernetes.io/projected/40d20127-9265-4de3-a7ae-a5493406441c-kube-api-access-5zhjb\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.698924 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-pull\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.699162 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-buildcachedir\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.699357 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.699603 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.699796 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.699974 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-run\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.700084 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-buildworkdir\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.700232 4810 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-root\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.700253 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-system-configs\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.705768 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-pull\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.706042 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-push\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.714592 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zhjb\" (UniqueName: \"kubernetes.io/projected/40d20127-9265-4de3-a7ae-a5493406441c-kube-api-access-5zhjb\") pod \"sg-core-2-build\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " pod="service-telemetry/sg-core-2-build" Oct 09 00:22:09 crc kubenswrapper[4810]: I1009 00:22:09.821455 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 09 00:22:10 crc kubenswrapper[4810]: I1009 00:22:10.083271 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Oct 09 00:22:10 crc kubenswrapper[4810]: I1009 00:22:10.985318 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerStarted","Data":"64e6f2452861cb8988a387fd571f50f24bb512b1a8b541ac314893f94d0fe704"} Oct 09 00:22:10 crc kubenswrapper[4810]: I1009 00:22:10.985707 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerStarted","Data":"2859ac9ba9c9ad4476127ca35c471dac2fde3b872891ceaa073991f7b1e83845"} Oct 09 00:22:11 crc kubenswrapper[4810]: I1009 00:22:11.993911 4810 generic.go:334] "Generic (PLEG): container finished" podID="40d20127-9265-4de3-a7ae-a5493406441c" containerID="64e6f2452861cb8988a387fd571f50f24bb512b1a8b541ac314893f94d0fe704" exitCode=0 Oct 09 00:22:11 crc kubenswrapper[4810]: I1009 00:22:11.993963 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerDied","Data":"64e6f2452861cb8988a387fd571f50f24bb512b1a8b541ac314893f94d0fe704"} Oct 09 00:22:13 crc kubenswrapper[4810]: I1009 00:22:13.002781 4810 generic.go:334] "Generic (PLEG): container finished" podID="40d20127-9265-4de3-a7ae-a5493406441c" containerID="39d991ef2c775507872792bb37ec351cdc2a4ea19722e36464fe689d6dd839ee" exitCode=0 Oct 09 00:22:13 crc kubenswrapper[4810]: I1009 00:22:13.002870 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerDied","Data":"39d991ef2c775507872792bb37ec351cdc2a4ea19722e36464fe689d6dd839ee"} Oct 09 00:22:13 crc kubenswrapper[4810]: I1009 00:22:13.054247 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_40d20127-9265-4de3-a7ae-a5493406441c/manage-dockerfile/0.log" Oct 09 00:22:14 crc kubenswrapper[4810]: I1009 00:22:14.011794 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerStarted","Data":"be955de1372babc880d6175077360916f5dc319b0b419bc2992bfe049483cdd2"} Oct 09 00:22:14 crc kubenswrapper[4810]: I1009 00:22:14.065743 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-2-build" podStartSLOduration=5.065715669 podStartE2EDuration="5.065715669s" podCreationTimestamp="2025-10-09 00:22:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:22:14.05400368 +0000 UTC m=+931.579642401" watchObservedRunningTime="2025-10-09 00:22:14.065715669 +0000 UTC m=+931.591354410" Oct 09 00:22:21 crc kubenswrapper[4810]: I1009 00:22:21.184295 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:22:21 crc kubenswrapper[4810]: I1009 00:22:21.184921 4810 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:22:51 crc kubenswrapper[4810]: I1009 00:22:51.184121 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:22:51 crc kubenswrapper[4810]: I1009 00:22:51.184663 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:22:51 crc kubenswrapper[4810]: I1009 00:22:51.184717 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:22:51 crc kubenswrapper[4810]: I1009 00:22:51.185324 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5efb6650f06feac4015d4c59a677ab43187f3e6f6c0ee6ea74bc01256cb113d3"} pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:22:51 crc kubenswrapper[4810]: I1009 00:22:51.185389 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://5efb6650f06feac4015d4c59a677ab43187f3e6f6c0ee6ea74bc01256cb113d3" gracePeriod=600 Oct 09 00:22:52 crc kubenswrapper[4810]: I1009 00:22:52.298879 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="5efb6650f06feac4015d4c59a677ab43187f3e6f6c0ee6ea74bc01256cb113d3" exitCode=0 Oct 09 00:22:52 crc kubenswrapper[4810]: I1009 00:22:52.298951 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"5efb6650f06feac4015d4c59a677ab43187f3e6f6c0ee6ea74bc01256cb113d3"} Oct 09 00:22:52 crc kubenswrapper[4810]: I1009 00:22:52.299594 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"70e29becead84dc56488e8fb810fb7212a994a5f2603ea5f084d6f1f12ab4086"} Oct 09 00:22:52 crc kubenswrapper[4810]: I1009 00:22:52.299630 4810 scope.go:117] "RemoveContainer" containerID="9655c39bfc737814bdf380a44432c260858cbf7ed04ea57c70188208f88269af" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.295938 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tcdcm"] Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.297464 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.303765 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tcdcm"] Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.426506 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-utilities\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.426555 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-catalog-content\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.426596 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s88x4\" (UniqueName: \"kubernetes.io/projected/a68bc3ed-b232-4c68-86d7-057bbbba1357-kube-api-access-s88x4\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.528374 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-utilities\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.528434 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-catalog-content\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.528475 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s88x4\" (UniqueName: \"kubernetes.io/projected/a68bc3ed-b232-4c68-86d7-057bbbba1357-kube-api-access-s88x4\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.529441 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-utilities\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.529644 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-catalog-content\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.549109 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s88x4\" (UniqueName: \"kubernetes.io/projected/a68bc3ed-b232-4c68-86d7-057bbbba1357-kube-api-access-s88x4\") pod \"certified-operators-tcdcm\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:10 crc kubenswrapper[4810]: I1009 00:23:10.612239 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:11 crc kubenswrapper[4810]: I1009 00:23:11.142396 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tcdcm"] Oct 09 00:23:11 crc kubenswrapper[4810]: W1009 00:23:11.149338 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda68bc3ed_b232_4c68_86d7_057bbbba1357.slice/crio-f8065ee650c2405cd0615a23f267739e9fce8f76b82dca153ef941534192f4a9 WatchSource:0}: Error finding container f8065ee650c2405cd0615a23f267739e9fce8f76b82dca153ef941534192f4a9: Status 404 returned error can't find the container with id f8065ee650c2405cd0615a23f267739e9fce8f76b82dca153ef941534192f4a9 Oct 09 00:23:11 crc kubenswrapper[4810]: I1009 00:23:11.427056 4810 generic.go:334] "Generic (PLEG): container finished" podID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerID="7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c" exitCode=0 Oct 09 00:23:11 crc kubenswrapper[4810]: I1009 00:23:11.427125 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerDied","Data":"7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c"} Oct 09 00:23:11 crc kubenswrapper[4810]: I1009 00:23:11.427360 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerStarted","Data":"f8065ee650c2405cd0615a23f267739e9fce8f76b82dca153ef941534192f4a9"} Oct 09 00:23:11 crc kubenswrapper[4810]: I1009 00:23:11.428731 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 00:23:12 crc kubenswrapper[4810]: I1009 00:23:12.435251 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerStarted","Data":"d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108"} Oct 09 00:23:13 crc kubenswrapper[4810]: I1009 00:23:13.445625 4810 generic.go:334] "Generic (PLEG): container finished" podID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerID="d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108" exitCode=0 Oct 09 00:23:13 crc kubenswrapper[4810]: I1009 00:23:13.445720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerDied","Data":"d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108"} Oct 09 00:23:14 crc kubenswrapper[4810]: I1009 00:23:14.461709 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerStarted","Data":"c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38"} Oct 09 00:23:14 crc kubenswrapper[4810]: I1009 
00:23:14.483311 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tcdcm" podStartSLOduration=1.678930327 podStartE2EDuration="4.48329327s" podCreationTimestamp="2025-10-09 00:23:10 +0000 UTC" firstStartedPulling="2025-10-09 00:23:11.428480846 +0000 UTC m=+988.954119557" lastFinishedPulling="2025-10-09 00:23:14.232843769 +0000 UTC m=+991.758482500" observedRunningTime="2025-10-09 00:23:14.482657752 +0000 UTC m=+992.008296473" watchObservedRunningTime="2025-10-09 00:23:14.48329327 +0000 UTC m=+992.008931971" Oct 09 00:23:20 crc kubenswrapper[4810]: I1009 00:23:20.613329 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:20 crc kubenswrapper[4810]: I1009 00:23:20.613933 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:20 crc kubenswrapper[4810]: I1009 00:23:20.655384 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:21 crc kubenswrapper[4810]: I1009 00:23:21.563051 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:21 crc kubenswrapper[4810]: I1009 00:23:21.618972 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tcdcm"] Oct 09 00:23:23 crc kubenswrapper[4810]: I1009 00:23:23.515476 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tcdcm" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="registry-server" containerID="cri-o://c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38" gracePeriod=2 Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.133608 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.225284 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s88x4\" (UniqueName: \"kubernetes.io/projected/a68bc3ed-b232-4c68-86d7-057bbbba1357-kube-api-access-s88x4\") pod \"a68bc3ed-b232-4c68-86d7-057bbbba1357\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.225559 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-utilities\") pod \"a68bc3ed-b232-4c68-86d7-057bbbba1357\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.225635 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-catalog-content\") pod \"a68bc3ed-b232-4c68-86d7-057bbbba1357\" (UID: \"a68bc3ed-b232-4c68-86d7-057bbbba1357\") " Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.227188 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-utilities" (OuterVolumeSpecName: "utilities") pod "a68bc3ed-b232-4c68-86d7-057bbbba1357" (UID: "a68bc3ed-b232-4c68-86d7-057bbbba1357"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.230256 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a68bc3ed-b232-4c68-86d7-057bbbba1357-kube-api-access-s88x4" (OuterVolumeSpecName: "kube-api-access-s88x4") pod "a68bc3ed-b232-4c68-86d7-057bbbba1357" (UID: "a68bc3ed-b232-4c68-86d7-057bbbba1357"). InnerVolumeSpecName "kube-api-access-s88x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.268519 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a68bc3ed-b232-4c68-86d7-057bbbba1357" (UID: "a68bc3ed-b232-4c68-86d7-057bbbba1357"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.327641 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.327678 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a68bc3ed-b232-4c68-86d7-057bbbba1357-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.327695 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s88x4\" (UniqueName: \"kubernetes.io/projected/a68bc3ed-b232-4c68-86d7-057bbbba1357-kube-api-access-s88x4\") on node \"crc\" DevicePath \"\"" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.530056 4810 generic.go:334] "Generic (PLEG): container finished" podID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerID="c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38" exitCode=0 Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.530097 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerDied","Data":"c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38"} Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.530122 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tcdcm" event={"ID":"a68bc3ed-b232-4c68-86d7-057bbbba1357","Type":"ContainerDied","Data":"f8065ee650c2405cd0615a23f267739e9fce8f76b82dca153ef941534192f4a9"} Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.530126 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tcdcm" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.530139 4810 scope.go:117] "RemoveContainer" containerID="c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.551068 4810 scope.go:117] "RemoveContainer" containerID="d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.560377 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tcdcm"] Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.568663 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tcdcm"] Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.581981 4810 scope.go:117] "RemoveContainer" containerID="7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.599946 4810 scope.go:117] "RemoveContainer" containerID="c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38" Oct 09 00:23:25 crc kubenswrapper[4810]: E1009 00:23:25.604897 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38\": container with ID starting with c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38 not found: ID does not exist" containerID="c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.605161 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38"} err="failed to get container status \"c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38\": rpc error: code = NotFound desc = could not find container \"c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38\": container with ID starting with c9e2f047b2678489849cc132f22fe0e7f549f801547a18f444cbae114b06fd38 not found: ID does not exist" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.605273 4810 scope.go:117] "RemoveContainer" containerID="d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108" Oct 09 00:23:25 crc kubenswrapper[4810]: E1009 00:23:25.608763 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108\": container with ID starting with d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108 not found: ID does not exist" containerID="d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.608806 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108"} err="failed to get container status \"d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108\": rpc error: code = NotFound desc = could not find container \"d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108\": container with ID starting with d91a7d407088486f1066a5342f375b03029ef1e946d9e38d321cdd05e4177108 not found: ID does not exist" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.608854 4810 scope.go:117] "RemoveContainer" 
containerID="7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c" Oct 09 00:23:25 crc kubenswrapper[4810]: E1009 00:23:25.609250 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c\": container with ID starting with 7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c not found: ID does not exist" containerID="7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c" Oct 09 00:23:25 crc kubenswrapper[4810]: I1009 00:23:25.609382 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c"} err="failed to get container status \"7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c\": rpc error: code = NotFound desc = could not find container \"7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c\": container with ID starting with 7f6fd4b2dea47ca46d5fb1928e779bab3e3b0f490965a31a500de3db5fc5b92c not found: ID does not exist" Oct 09 00:23:27 crc kubenswrapper[4810]: I1009 00:23:27.261027 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" path="/var/lib/kubelet/pods/a68bc3ed-b232-4c68-86d7-057bbbba1357/volumes" Oct 09 00:24:51 crc kubenswrapper[4810]: I1009 00:24:51.184558 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:24:51 crc kubenswrapper[4810]: I1009 00:24:51.185138 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:25:21 crc kubenswrapper[4810]: I1009 00:25:21.184778 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:25:21 crc kubenswrapper[4810]: I1009 00:25:21.185479 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:25:35 crc kubenswrapper[4810]: I1009 00:25:35.446492 4810 generic.go:334] "Generic (PLEG): container finished" podID="40d20127-9265-4de3-a7ae-a5493406441c" containerID="be955de1372babc880d6175077360916f5dc319b0b419bc2992bfe049483cdd2" exitCode=0 Oct 09 00:25:35 crc kubenswrapper[4810]: I1009 00:25:35.446574 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerDied","Data":"be955de1372babc880d6175077360916f5dc319b0b419bc2992bfe049483cdd2"} Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.816299 4810 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899007 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-pull\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899569 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-ca-bundles\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899599 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-system-configs\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899666 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-build-blob-cache\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899685 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-proxy-ca-bundles\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899702 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-buildworkdir\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899720 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-buildcachedir\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899746 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-node-pullsecrets\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899778 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zhjb\" (UniqueName: \"kubernetes.io/projected/40d20127-9265-4de3-a7ae-a5493406441c-kube-api-access-5zhjb\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899799 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: 
\"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-push\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899842 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-run\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.899868 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-root\") pod \"40d20127-9265-4de3-a7ae-a5493406441c\" (UID: \"40d20127-9265-4de3-a7ae-a5493406441c\") " Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.900288 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.900353 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.901042 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.901414 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.901457 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.901548 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). 
InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.905057 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.906034 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40d20127-9265-4de3-a7ae-a5493406441c-kube-api-access-5zhjb" (OuterVolumeSpecName: "kube-api-access-5zhjb") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "kube-api-access-5zhjb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.910527 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:25:36 crc kubenswrapper[4810]: I1009 00:25:36.911918 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001608 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001659 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zhjb\" (UniqueName: \"kubernetes.io/projected/40d20127-9265-4de3-a7ae-a5493406441c-kube-api-access-5zhjb\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001682 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001699 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001717 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/40d20127-9265-4de3-a7ae-a5493406441c-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001732 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001747 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001762 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/40d20127-9265-4de3-a7ae-a5493406441c-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001776 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/40d20127-9265-4de3-a7ae-a5493406441c-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.001792 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.276286 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.306037 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.463436 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"40d20127-9265-4de3-a7ae-a5493406441c","Type":"ContainerDied","Data":"2859ac9ba9c9ad4476127ca35c471dac2fde3b872891ceaa073991f7b1e83845"} Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.463512 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2859ac9ba9c9ad4476127ca35c471dac2fde3b872891ceaa073991f7b1e83845" Oct 09 00:25:37 crc kubenswrapper[4810]: I1009 00:25:37.463515 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Oct 09 00:25:39 crc kubenswrapper[4810]: I1009 00:25:39.512162 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "40d20127-9265-4de3-a7ae-a5493406441c" (UID: "40d20127-9265-4de3-a7ae-a5493406441c"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:39 crc kubenswrapper[4810]: I1009 00:25:39.544663 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/40d20127-9265-4de3-a7ae-a5493406441c-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748456 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 09 00:25:41 crc kubenswrapper[4810]: E1009 00:25:41.748658 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="extract-content" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748672 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="extract-content" Oct 09 00:25:41 crc kubenswrapper[4810]: E1009 00:25:41.748686 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="manage-dockerfile" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748691 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="manage-dockerfile" Oct 09 00:25:41 crc kubenswrapper[4810]: E1009 00:25:41.748701 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="git-clone" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748707 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="git-clone" Oct 09 00:25:41 crc kubenswrapper[4810]: E1009 00:25:41.748719 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="docker-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748724 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="docker-build" Oct 09 00:25:41 crc 
kubenswrapper[4810]: E1009 00:25:41.748733 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="registry-server" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748739 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="registry-server" Oct 09 00:25:41 crc kubenswrapper[4810]: E1009 00:25:41.748747 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="extract-utilities" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748753 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="extract-utilities" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748880 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d20127-9265-4de3-a7ae-a5493406441c" containerName="docker-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.748895 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a68bc3ed-b232-4c68-86d7-057bbbba1357" containerName="registry-server" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.749422 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.751263 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-ca" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.751316 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-sys-config" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.751498 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.752114 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-global-ca" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.767115 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877609 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-pull\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877653 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sknxs\" (UniqueName: \"kubernetes.io/projected/224ddced-fb27-4858-859e-bd3f6eef22c6-kube-api-access-sknxs\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877698 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877765 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877781 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877807 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877845 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877885 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877902 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877926 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877954 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-push\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.877997 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc 
kubenswrapper[4810]: I1009 00:25:41.978640 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978710 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-pull\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978734 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sknxs\" (UniqueName: \"kubernetes.io/projected/224ddced-fb27-4858-859e-bd3f6eef22c6-kube-api-access-sknxs\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978760 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978793 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978810 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978864 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978885 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978909 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978929 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978953 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.978977 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-push\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.979127 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.979544 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.979510 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.979762 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.979807 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.980077 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.980257 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.980381 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.980766 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.983759 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-pull\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.984667 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-push\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:41 crc kubenswrapper[4810]: I1009 00:25:41.997970 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sknxs\" (UniqueName: \"kubernetes.io/projected/224ddced-fb27-4858-859e-bd3f6eef22c6-kube-api-access-sknxs\") pod \"sg-bridge-1-build\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:42 crc kubenswrapper[4810]: I1009 00:25:42.070814 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:42 crc kubenswrapper[4810]: I1009 00:25:42.342652 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 09 00:25:42 crc kubenswrapper[4810]: I1009 00:25:42.499277 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"224ddced-fb27-4858-859e-bd3f6eef22c6","Type":"ContainerStarted","Data":"c81647bf9095ff7dbb210516bae71ccf96481be52c4345567abad8f7e790f2dd"} Oct 09 00:25:43 crc kubenswrapper[4810]: I1009 00:25:43.505499 4810 generic.go:334] "Generic (PLEG): container finished" podID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerID="5e92d851c2f743424d6db919b32e638a21212f3739a999a71b74cc36b340af79" exitCode=0 Oct 09 00:25:43 crc kubenswrapper[4810]: I1009 00:25:43.505539 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"224ddced-fb27-4858-859e-bd3f6eef22c6","Type":"ContainerDied","Data":"5e92d851c2f743424d6db919b32e638a21212f3739a999a71b74cc36b340af79"} Oct 09 00:25:44 crc kubenswrapper[4810]: I1009 00:25:44.515763 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"224ddced-fb27-4858-859e-bd3f6eef22c6","Type":"ContainerStarted","Data":"8c8579a4a2dc043878bb1bdc2989d903d7c346ae4f9e26608ab2e6cf69d7e278"} Oct 09 00:25:44 crc kubenswrapper[4810]: I1009 00:25:44.550156 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-1-build" podStartSLOduration=3.550124993 podStartE2EDuration="3.550124993s" podCreationTimestamp="2025-10-09 00:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:25:44.546868199 +0000 UTC m=+1142.072506990" watchObservedRunningTime="2025-10-09 00:25:44.550124993 +0000 UTC m=+1142.075763754" Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.184180 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.184812 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.184918 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.185783 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"70e29becead84dc56488e8fb810fb7212a994a5f2603ea5f084d6f1f12ab4086"} pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.185941 4810 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://70e29becead84dc56488e8fb810fb7212a994a5f2603ea5f084d6f1f12ab4086" gracePeriod=600 Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.561305 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="70e29becead84dc56488e8fb810fb7212a994a5f2603ea5f084d6f1f12ab4086" exitCode=0 Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.561385 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"70e29becead84dc56488e8fb810fb7212a994a5f2603ea5f084d6f1f12ab4086"} Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.561660 4810 scope.go:117] "RemoveContainer" containerID="5efb6650f06feac4015d4c59a677ab43187f3e6f6c0ee6ea74bc01256cb113d3" Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.563582 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_224ddced-fb27-4858-859e-bd3f6eef22c6/docker-build/0.log" Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.564174 4810 generic.go:334] "Generic (PLEG): container finished" podID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerID="8c8579a4a2dc043878bb1bdc2989d903d7c346ae4f9e26608ab2e6cf69d7e278" exitCode=1 Oct 09 00:25:51 crc kubenswrapper[4810]: I1009 00:25:51.564224 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"224ddced-fb27-4858-859e-bd3f6eef22c6","Type":"ContainerDied","Data":"8c8579a4a2dc043878bb1bdc2989d903d7c346ae4f9e26608ab2e6cf69d7e278"} Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.006420 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.575533 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"debdd6b5cbff428344b7777ba4f55fefe79d94121774d2baf5531c75de88d838"} Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.871047 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_224ddced-fb27-4858-859e-bd3f6eef22c6/docker-build/0.log" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.871931 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.928886 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-buildcachedir\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.928946 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-build-blob-cache\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.928993 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-system-configs\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929016 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-node-pullsecrets\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929016 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929085 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-root\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929115 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929163 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-proxy-ca-bundles\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929210 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-pull\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929259 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sknxs\" (UniqueName: \"kubernetes.io/projected/224ddced-fb27-4858-859e-bd3f6eef22c6-kube-api-access-sknxs\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929305 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-run\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929340 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-ca-bundles\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929376 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-buildworkdir\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929426 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-push\") pod \"224ddced-fb27-4858-859e-bd3f6eef22c6\" (UID: \"224ddced-fb27-4858-859e-bd3f6eef22c6\") " Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929740 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.929761 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/224ddced-fb27-4858-859e-bd3f6eef22c6-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.930095 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.930323 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.930336 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.930501 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.930918 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.936167 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.939517 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:25:52 crc kubenswrapper[4810]: I1009 00:25:52.941179 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/224ddced-fb27-4858-859e-bd3f6eef22c6-kube-api-access-sknxs" (OuterVolumeSpecName: "kube-api-access-sknxs") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "kube-api-access-sknxs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.007225 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030737 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sknxs\" (UniqueName: \"kubernetes.io/projected/224ddced-fb27-4858-859e-bd3f6eef22c6-kube-api-access-sknxs\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030769 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030781 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030792 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030801 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030810 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030859 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030870 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/224ddced-fb27-4858-859e-bd3f6eef22c6-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.030880 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/224ddced-fb27-4858-859e-bd3f6eef22c6-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.353573 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "224ddced-fb27-4858-859e-bd3f6eef22c6" (UID: "224ddced-fb27-4858-859e-bd3f6eef22c6"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.436572 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/224ddced-fb27-4858-859e-bd3f6eef22c6-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.587418 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_224ddced-fb27-4858-859e-bd3f6eef22c6/docker-build/0.log" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.588325 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"224ddced-fb27-4858-859e-bd3f6eef22c6","Type":"ContainerDied","Data":"c81647bf9095ff7dbb210516bae71ccf96481be52c4345567abad8f7e790f2dd"} Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.588368 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.588375 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c81647bf9095ff7dbb210516bae71ccf96481be52c4345567abad8f7e790f2dd" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.636281 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.641704 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.737534 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-2-build"] Oct 09 00:25:53 crc kubenswrapper[4810]: E1009 00:25:53.737791 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerName="manage-dockerfile" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.737813 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerName="manage-dockerfile" Oct 09 00:25:53 crc kubenswrapper[4810]: E1009 00:25:53.737844 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerName="docker-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.737852 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerName="docker-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.738005 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="224ddced-fb27-4858-859e-bd3f6eef22c6" containerName="docker-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.739755 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.741844 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.742139 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-sys-config" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.742218 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-global-ca" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.742586 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-ca" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.813144 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841177 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841236 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841273 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pksll\" (UniqueName: \"kubernetes.io/projected/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-kube-api-access-pksll\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841303 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-push\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841342 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841373 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841405 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" 
(UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841436 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841456 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841476 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841494 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.841515 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-pull\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.942437 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.942752 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.943132 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.943186 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.943656 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.943789 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.943947 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.944807 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945375 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945359 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945547 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945583 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945662 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-system-configs\") pod \"sg-bridge-2-build\" 
(UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945758 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-pull\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945914 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.945966 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.946032 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pksll\" (UniqueName: \"kubernetes.io/projected/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-kube-api-access-pksll\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.946105 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-push\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.946510 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.946903 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.947047 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.950953 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-push\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 
crc kubenswrapper[4810]: I1009 00:25:53.951193 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-pull\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:53 crc kubenswrapper[4810]: I1009 00:25:53.981632 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pksll\" (UniqueName: \"kubernetes.io/projected/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-kube-api-access-pksll\") pod \"sg-bridge-2-build\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:54 crc kubenswrapper[4810]: I1009 00:25:54.067879 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 09 00:25:54 crc kubenswrapper[4810]: I1009 00:25:54.552813 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Oct 09 00:25:54 crc kubenswrapper[4810]: W1009 00:25:54.557800 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93038d6d_ba5d_4ccd_a5f8_7c4c1d8f97f5.slice/crio-de2e91e6c3f10a5add6d901f49bd4b504ab947be27ed2edbb572e98bc6742420 WatchSource:0}: Error finding container de2e91e6c3f10a5add6d901f49bd4b504ab947be27ed2edbb572e98bc6742420: Status 404 returned error can't find the container with id de2e91e6c3f10a5add6d901f49bd4b504ab947be27ed2edbb572e98bc6742420 Oct 09 00:25:54 crc kubenswrapper[4810]: I1009 00:25:54.594105 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerStarted","Data":"de2e91e6c3f10a5add6d901f49bd4b504ab947be27ed2edbb572e98bc6742420"} Oct 09 00:25:55 crc kubenswrapper[4810]: I1009 00:25:55.267856 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="224ddced-fb27-4858-859e-bd3f6eef22c6" path="/var/lib/kubelet/pods/224ddced-fb27-4858-859e-bd3f6eef22c6/volumes" Oct 09 00:25:55 crc kubenswrapper[4810]: I1009 00:25:55.603035 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerStarted","Data":"527a6671a5408a14d6bb93efacdb4b5100f0f5c13d0a208e13d22a56d0520c14"} Oct 09 00:25:56 crc kubenswrapper[4810]: I1009 00:25:56.612079 4810 generic.go:334] "Generic (PLEG): container finished" podID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerID="527a6671a5408a14d6bb93efacdb4b5100f0f5c13d0a208e13d22a56d0520c14" exitCode=0 Oct 09 00:25:56 crc kubenswrapper[4810]: I1009 00:25:56.612502 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerDied","Data":"527a6671a5408a14d6bb93efacdb4b5100f0f5c13d0a208e13d22a56d0520c14"} Oct 09 00:25:57 crc kubenswrapper[4810]: I1009 00:25:57.620090 4810 generic.go:334] "Generic (PLEG): container finished" podID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerID="aa1f5c1f15de82a82095f8efdeb499a0639da0b537b1fc5b7e5a7e616699279f" exitCode=0 Oct 09 00:25:57 crc kubenswrapper[4810]: I1009 00:25:57.620134 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" 
event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerDied","Data":"aa1f5c1f15de82a82095f8efdeb499a0639da0b537b1fc5b7e5a7e616699279f"} Oct 09 00:25:57 crc kubenswrapper[4810]: I1009 00:25:57.646899 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5/manage-dockerfile/0.log" Oct 09 00:25:58 crc kubenswrapper[4810]: I1009 00:25:58.636244 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerStarted","Data":"275406f3097575fd8db2bf40b8e8d68178040077903b7fab22334f88de066b02"} Oct 09 00:25:58 crc kubenswrapper[4810]: I1009 00:25:58.683774 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-2-build" podStartSLOduration=5.683752792 podStartE2EDuration="5.683752792s" podCreationTimestamp="2025-10-09 00:25:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:25:58.678740657 +0000 UTC m=+1156.204379398" watchObservedRunningTime="2025-10-09 00:25:58.683752792 +0000 UTC m=+1156.209391503" Oct 09 00:26:43 crc kubenswrapper[4810]: I1009 00:26:43.952680 4810 generic.go:334] "Generic (PLEG): container finished" podID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerID="275406f3097575fd8db2bf40b8e8d68178040077903b7fab22334f88de066b02" exitCode=0 Oct 09 00:26:43 crc kubenswrapper[4810]: I1009 00:26:43.952756 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerDied","Data":"275406f3097575fd8db2bf40b8e8d68178040077903b7fab22334f88de066b02"} Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.244167 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381081 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-blob-cache\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381199 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-pull\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381259 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-proxy-ca-bundles\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381328 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-node-pullsecrets\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381409 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-push\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381438 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-system-configs\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381496 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-root\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381534 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildworkdir\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381565 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-run\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381594 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildcachedir\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381649 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pksll\" (UniqueName: \"kubernetes.io/projected/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-kube-api-access-pksll\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.381686 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-ca-bundles\") pod \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\" (UID: \"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5\") " Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.382122 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.382558 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.383080 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.383335 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.383699 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.383742 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). 
InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.384294 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.389125 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.389162 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-kube-api-access-pksll" (OuterVolumeSpecName: "kube-api-access-pksll") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "kube-api-access-pksll". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.389166 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483037 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483074 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483086 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483099 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483110 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483126 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483142 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483157 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pksll\" (UniqueName: \"kubernetes.io/projected/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-kube-api-access-pksll\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483175 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.483190 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.496340 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.585151 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.970386 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5","Type":"ContainerDied","Data":"de2e91e6c3f10a5add6d901f49bd4b504ab947be27ed2edbb572e98bc6742420"} Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.970431 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de2e91e6c3f10a5add6d901f49bd4b504ab947be27ed2edbb572e98bc6742420" Oct 09 00:26:45 crc kubenswrapper[4810]: I1009 00:26:45.970500 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Oct 09 00:26:46 crc kubenswrapper[4810]: I1009 00:26:46.085414 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" (UID: "93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:26:46 crc kubenswrapper[4810]: I1009 00:26:46.092353 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.627748 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 09 00:26:50 crc kubenswrapper[4810]: E1009 00:26:50.628391 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="manage-dockerfile" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.628414 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="manage-dockerfile" Oct 09 00:26:50 crc kubenswrapper[4810]: E1009 00:26:50.628432 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="docker-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.628443 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="docker-build" Oct 09 00:26:50 crc kubenswrapper[4810]: E1009 00:26:50.628462 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="git-clone" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.628473 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="git-clone" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.628648 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="93038d6d-ba5d-4ccd-a5f8-7c4c1d8f97f5" containerName="docker-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.629583 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.633280 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-global-ca" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.633334 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-ca" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.633418 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.633672 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-sys-config" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.643915 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762144 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762200 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762232 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762265 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762299 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762361 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " 
pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762382 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762406 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762425 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762439 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762459 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bpv9\" (UniqueName: \"kubernetes.io/projected/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-kube-api-access-9bpv9\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.762476 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864124 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bpv9\" (UniqueName: \"kubernetes.io/projected/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-kube-api-access-9bpv9\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864201 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: 
I1009 00:26:50.864266 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864304 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864344 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864392 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864440 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864493 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864512 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864553 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864670 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: 
\"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864748 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864798 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.864814 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865024 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865271 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865415 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865641 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865653 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865695 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: 
\"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.865755 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.871311 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.871510 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.880915 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bpv9\" (UniqueName: \"kubernetes.io/projected/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-kube-api-access-9bpv9\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:50 crc kubenswrapper[4810]: I1009 00:26:50.955135 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:26:51 crc kubenswrapper[4810]: I1009 00:26:51.166014 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 09 00:26:52 crc kubenswrapper[4810]: I1009 00:26:52.039065 4810 generic.go:334] "Generic (PLEG): container finished" podID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerID="5ccfe84d17bdf110cd5a6b157c925ad762cbc1461cec785698c618862672818f" exitCode=0 Oct 09 00:26:52 crc kubenswrapper[4810]: I1009 00:26:52.039144 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e","Type":"ContainerDied","Data":"5ccfe84d17bdf110cd5a6b157c925ad762cbc1461cec785698c618862672818f"} Oct 09 00:26:52 crc kubenswrapper[4810]: I1009 00:26:52.039458 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e","Type":"ContainerStarted","Data":"c76647864d095a40f0652a12d2d739e27266ec3492e9a7fa07d170b87e794cb2"} Oct 09 00:26:53 crc kubenswrapper[4810]: I1009 00:26:53.054527 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e","Type":"ContainerStarted","Data":"b0d36f734830a38194061314f9baa18a3343b62ac1d8f5b4fa91023a73344f1c"} Oct 09 00:26:53 crc kubenswrapper[4810]: I1009 00:26:53.094467 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-1-build" podStartSLOduration=3.094449451 podStartE2EDuration="3.094449451s" podCreationTimestamp="2025-10-09 00:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:26:53.091739382 +0000 UTC m=+1210.617378163" watchObservedRunningTime="2025-10-09 00:26:53.094449451 +0000 UTC m=+1210.620088162" Oct 09 00:27:00 crc kubenswrapper[4810]: I1009 00:27:00.877317 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 09 00:27:00 crc kubenswrapper[4810]: I1009 00:27:00.878247 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/prometheus-webhook-snmp-1-build" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerName="docker-build" containerID="cri-o://b0d36f734830a38194061314f9baa18a3343b62ac1d8f5b4fa91023a73344f1c" gracePeriod=30 Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.113188 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e/docker-build/0.log" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.113862 4810 generic.go:334] "Generic (PLEG): container finished" podID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerID="b0d36f734830a38194061314f9baa18a3343b62ac1d8f5b4fa91023a73344f1c" exitCode=1 Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.113893 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e","Type":"ContainerDied","Data":"b0d36f734830a38194061314f9baa18a3343b62ac1d8f5b4fa91023a73344f1c"} Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.280685 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e/docker-build/0.log" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.281324 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420004 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-push\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420042 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bpv9\" (UniqueName: \"kubernetes.io/projected/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-kube-api-access-9bpv9\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420079 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-node-pullsecrets\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420098 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-proxy-ca-bundles\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420123 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-root\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420145 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildworkdir\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420174 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-blob-cache\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420174 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420202 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-system-configs\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420232 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildcachedir\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420260 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-pull\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420279 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-run\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420302 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-ca-bundles\") pod \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\" (UID: \"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e\") " Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420517 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420629 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.420941 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.421176 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.421232 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.421748 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.422056 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.426038 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.426361 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-kube-api-access-9bpv9" (OuterVolumeSpecName: "kube-api-access-9bpv9") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "kube-api-access-9bpv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.426645 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.471946 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522484 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522586 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bpv9\" (UniqueName: \"kubernetes.io/projected/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-kube-api-access-9bpv9\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522606 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522623 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522642 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522657 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522673 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522689 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522705 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.522721 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.767311 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" (UID: "a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:27:01 crc kubenswrapper[4810]: I1009 00:27:01.826456 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.123102 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e/docker-build/0.log" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.123719 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e","Type":"ContainerDied","Data":"c76647864d095a40f0652a12d2d739e27266ec3492e9a7fa07d170b87e794cb2"} Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.123798 4810 scope.go:117] "RemoveContainer" containerID="b0d36f734830a38194061314f9baa18a3343b62ac1d8f5b4fa91023a73344f1c" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.123955 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.151529 4810 scope.go:117] "RemoveContainer" containerID="5ccfe84d17bdf110cd5a6b157c925ad762cbc1461cec785698c618862672818f" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.188021 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.196272 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.489155 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Oct 09 00:27:02 crc kubenswrapper[4810]: E1009 00:27:02.489525 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerName="docker-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.489551 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerName="docker-build" Oct 09 00:27:02 crc kubenswrapper[4810]: E1009 00:27:02.489588 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerName="manage-dockerfile" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.489603 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerName="manage-dockerfile" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.489790 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" containerName="docker-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.491494 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.495379 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.495445 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-ca" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.496040 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-sys-config" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.496077 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-global-ca" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.512348 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539476 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539555 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539665 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539746 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539795 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539859 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " 
pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.539931 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtq5n\" (UniqueName: \"kubernetes.io/projected/292f7277-3ab5-4f3e-bfb5-0742233fd314-kube-api-access-vtq5n\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.540102 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.540192 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.540263 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.540325 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.540424 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.642687 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.642771 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.642863 4810 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.642906 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.642898 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.642946 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643018 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643064 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtq5n\" (UniqueName: \"kubernetes.io/projected/292f7277-3ab5-4f3e-bfb5-0742233fd314-kube-api-access-vtq5n\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643122 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643150 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643154 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: 
\"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643247 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643320 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643341 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643395 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.643724 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.644068 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.644220 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.644298 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.644666 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.644684 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.648106 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.648143 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.664997 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtq5n\" (UniqueName: \"kubernetes.io/projected/292f7277-3ab5-4f3e-bfb5-0742233fd314-kube-api-access-vtq5n\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:02 crc kubenswrapper[4810]: I1009 00:27:02.825017 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:27:03 crc kubenswrapper[4810]: I1009 00:27:03.030646 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Oct 09 00:27:03 crc kubenswrapper[4810]: I1009 00:27:03.133114 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerStarted","Data":"b154ac2f1b29835a47e4e4b67f9b4f0518cce2a30abb63a0418d19bcf0839bd4"} Oct 09 00:27:03 crc kubenswrapper[4810]: I1009 00:27:03.261664 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e" path="/var/lib/kubelet/pods/a9a2f9d5-0649-4107-a4a1-2bd5a40a8a5e/volumes" Oct 09 00:27:04 crc kubenswrapper[4810]: I1009 00:27:04.142455 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerStarted","Data":"33872840c04c114359fe056175504900e9827a68575687b7e0a612a279359b76"} Oct 09 00:27:04 crc kubenswrapper[4810]: E1009 00:27:04.304501 4810 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.177:59030->38.102.83.177:46105: write tcp 38.102.83.177:59030->38.102.83.177:46105: write: connection reset by peer Oct 09 00:27:05 crc kubenswrapper[4810]: I1009 00:27:05.160648 4810 generic.go:334] "Generic (PLEG): container finished" podID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerID="33872840c04c114359fe056175504900e9827a68575687b7e0a612a279359b76" exitCode=0 Oct 09 00:27:05 crc kubenswrapper[4810]: I1009 00:27:05.160703 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerDied","Data":"33872840c04c114359fe056175504900e9827a68575687b7e0a612a279359b76"} Oct 09 00:27:06 crc kubenswrapper[4810]: I1009 00:27:06.169335 4810 generic.go:334] "Generic (PLEG): container finished" podID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerID="47d3e1a8d86e9b140a0e5aea30d470740f067b7bd8046e2860cd195314583ec0" exitCode=0 Oct 09 00:27:06 crc kubenswrapper[4810]: I1009 00:27:06.169388 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerDied","Data":"47d3e1a8d86e9b140a0e5aea30d470740f067b7bd8046e2860cd195314583ec0"} Oct 09 00:27:06 crc kubenswrapper[4810]: I1009 00:27:06.227746 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_292f7277-3ab5-4f3e-bfb5-0742233fd314/manage-dockerfile/0.log" Oct 09 00:27:07 crc kubenswrapper[4810]: I1009 00:27:07.183195 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerStarted","Data":"32633b8a0936fb0ce12bc95f63cd95a482a018b9b5a557a55216847259803ab2"} Oct 09 00:27:07 crc kubenswrapper[4810]: I1009 00:27:07.228216 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-2-build" podStartSLOduration=5.228184473 podStartE2EDuration="5.228184473s" podCreationTimestamp="2025-10-09 00:27:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:27:07.224128576 +0000 UTC m=+1224.749767377" watchObservedRunningTime="2025-10-09 00:27:07.228184473 +0000 UTC m=+1224.753823224" Oct 09 00:27:51 crc kubenswrapper[4810]: I1009 00:27:51.184376 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:27:51 crc kubenswrapper[4810]: I1009 00:27:51.184839 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:28:05 crc kubenswrapper[4810]: I1009 00:28:05.641760 4810 generic.go:334] "Generic (PLEG): container finished" podID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerID="32633b8a0936fb0ce12bc95f63cd95a482a018b9b5a557a55216847259803ab2" exitCode=0 Oct 09 00:28:05 crc kubenswrapper[4810]: I1009 00:28:05.641882 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerDied","Data":"32633b8a0936fb0ce12bc95f63cd95a482a018b9b5a557a55216847259803ab2"} Oct 09 00:28:06 crc kubenswrapper[4810]: I1009 00:28:06.945493 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096161 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-pull\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096236 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildcachedir\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096295 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-ca-bundles\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096370 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildworkdir\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096370 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096443 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtq5n\" (UniqueName: \"kubernetes.io/projected/292f7277-3ab5-4f3e-bfb5-0742233fd314-kube-api-access-vtq5n\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096488 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-root\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096538 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-push\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096590 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-run\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096660 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-system-configs\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096715 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-node-pullsecrets\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096776 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-proxy-ca-bundles\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.096860 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-blob-cache\") pod \"292f7277-3ab5-4f3e-bfb5-0742233fd314\" (UID: \"292f7277-3ab5-4f3e-bfb5-0742233fd314\") " Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.097304 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.097358 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.097421 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.097554 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.097908 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.098635 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.099256 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.102214 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.102704 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.109817 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/292f7277-3ab5-4f3e-bfb5-0742233fd314-kube-api-access-vtq5n" (OuterVolumeSpecName: "kube-api-access-vtq5n") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "kube-api-access-vtq5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198608 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198645 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtq5n\" (UniqueName: \"kubernetes.io/projected/292f7277-3ab5-4f3e-bfb5-0742233fd314-kube-api-access-vtq5n\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198659 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198671 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198683 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198693 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/292f7277-3ab5-4f3e-bfb5-0742233fd314-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198703 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198713 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/292f7277-3ab5-4f3e-bfb5-0742233fd314-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.198724 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.209655 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.300443 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.660311 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"292f7277-3ab5-4f3e-bfb5-0742233fd314","Type":"ContainerDied","Data":"b154ac2f1b29835a47e4e4b67f9b4f0518cce2a30abb63a0418d19bcf0839bd4"} Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.660799 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b154ac2f1b29835a47e4e4b67f9b4f0518cce2a30abb63a0418d19bcf0839bd4" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.660438 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Oct 09 00:28:07 crc kubenswrapper[4810]: I1009 00:28:07.990621 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "292f7277-3ab5-4f3e-bfb5-0742233fd314" (UID: "292f7277-3ab5-4f3e-bfb5-0742233fd314"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:08 crc kubenswrapper[4810]: I1009 00:28:08.011377 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/292f7277-3ab5-4f3e-bfb5-0742233fd314-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.402159 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 09 00:28:16 crc kubenswrapper[4810]: E1009 00:28:16.402952 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="git-clone" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.402969 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="git-clone" Oct 09 00:28:16 crc kubenswrapper[4810]: E1009 00:28:16.403004 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="docker-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.403017 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="docker-build" Oct 09 00:28:16 crc kubenswrapper[4810]: E1009 00:28:16.403033 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="manage-dockerfile" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.403042 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="manage-dockerfile" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.403184 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="292f7277-3ab5-4f3e-bfb5-0742233fd314" containerName="docker-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.403900 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.409060 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-ca" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.409308 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-global-ca" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.409461 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-1-sys-config" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.409721 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.430587 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.536936 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.536975 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp9hr\" (UniqueName: \"kubernetes.io/projected/6038c31d-ccf3-4e00-aec1-a17021cd1b83-kube-api-access-tp9hr\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537003 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537137 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537231 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537354 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: 
\"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537459 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537506 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537546 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537574 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537658 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.537700 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.638688 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.638765 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.638860 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.638913 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.638960 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639000 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639041 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639073 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639131 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639175 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: 
\"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639194 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-blob-cache\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639217 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639284 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp9hr\" (UniqueName: \"kubernetes.io/projected/6038c31d-ccf3-4e00-aec1-a17021cd1b83-kube-api-access-tp9hr\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639554 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildcachedir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639900 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-run\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.639902 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildworkdir\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.640005 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-node-pullsecrets\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.640485 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-root\") pod \"service-telemetry-operator-bundle-1-build\" (UID: 
\"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.640556 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-system-configs\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.640733 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.641279 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.645807 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.646517 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.673336 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp9hr\" (UniqueName: \"kubernetes.io/projected/6038c31d-ccf3-4e00-aec1-a17021cd1b83-kube-api-access-tp9hr\") pod \"service-telemetry-operator-bundle-1-build\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.736962 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:16 crc kubenswrapper[4810]: I1009 00:28:16.971184 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 09 00:28:17 crc kubenswrapper[4810]: I1009 00:28:17.732660 4810 generic.go:334] "Generic (PLEG): container finished" podID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerID="4e8d58a44d45850c9c745d7a022289ab8eeb4a2fbe3e81ef2bc57f8564d9d432" exitCode=0 Oct 09 00:28:17 crc kubenswrapper[4810]: I1009 00:28:17.732750 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"6038c31d-ccf3-4e00-aec1-a17021cd1b83","Type":"ContainerDied","Data":"4e8d58a44d45850c9c745d7a022289ab8eeb4a2fbe3e81ef2bc57f8564d9d432"} Oct 09 00:28:17 crc kubenswrapper[4810]: I1009 00:28:17.734064 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"6038c31d-ccf3-4e00-aec1-a17021cd1b83","Type":"ContainerStarted","Data":"34745c0fcdf2975e42019f656c79ad1bcff922f77a87b476750d0dbce00fb035"} Oct 09 00:28:18 crc kubenswrapper[4810]: I1009 00:28:18.742724 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_6038c31d-ccf3-4e00-aec1-a17021cd1b83/docker-build/0.log" Oct 09 00:28:18 crc kubenswrapper[4810]: I1009 00:28:18.745146 4810 generic.go:334] "Generic (PLEG): container finished" podID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerID="cd743d278c205e518a8a189dd4232b7e51f2555db58e5370ad453076c44ed58c" exitCode=1 Oct 09 00:28:18 crc kubenswrapper[4810]: I1009 00:28:18.745201 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"6038c31d-ccf3-4e00-aec1-a17021cd1b83","Type":"ContainerDied","Data":"cd743d278c205e518a8a189dd4232b7e51f2555db58e5370ad453076c44ed58c"} Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.091248 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_6038c31d-ccf3-4e00-aec1-a17021cd1b83/docker-build/0.log" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.091924 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289102 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-blob-cache\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289159 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-push\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289205 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildworkdir\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289244 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-root\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289332 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildcachedir\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289363 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-pull\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289410 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp9hr\" (UniqueName: \"kubernetes.io/projected/6038c31d-ccf3-4e00-aec1-a17021cd1b83-kube-api-access-tp9hr\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289444 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-system-configs\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289429 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289472 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-ca-bundles\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289503 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-proxy-ca-bundles\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289568 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-node-pullsecrets\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289596 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-run\") pod \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\" (UID: \"6038c31d-ccf3-4e00-aec1-a17021cd1b83\") " Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289612 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289889 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289973 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.289998 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.290224 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.291037 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.291286 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.291643 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.291644 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.291924 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.299649 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.300888 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.302908 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6038c31d-ccf3-4e00-aec1-a17021cd1b83-kube-api-access-tp9hr" (OuterVolumeSpecName: "kube-api-access-tp9hr") pod "6038c31d-ccf3-4e00-aec1-a17021cd1b83" (UID: "6038c31d-ccf3-4e00-aec1-a17021cd1b83"). InnerVolumeSpecName "kube-api-access-tp9hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392078 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392148 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392173 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392197 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/6038c31d-ccf3-4e00-aec1-a17021cd1b83-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392224 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp9hr\" (UniqueName: \"kubernetes.io/projected/6038c31d-ccf3-4e00-aec1-a17021cd1b83-kube-api-access-tp9hr\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392248 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392274 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392298 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6038c31d-ccf3-4e00-aec1-a17021cd1b83-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392324 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6038c31d-ccf3-4e00-aec1-a17021cd1b83-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.392347 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/6038c31d-ccf3-4e00-aec1-a17021cd1b83-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.789028 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-1-build_6038c31d-ccf3-4e00-aec1-a17021cd1b83/docker-build/0.log" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.789690 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-1-build" event={"ID":"6038c31d-ccf3-4e00-aec1-a17021cd1b83","Type":"ContainerDied","Data":"34745c0fcdf2975e42019f656c79ad1bcff922f77a87b476750d0dbce00fb035"} Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.789755 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34745c0fcdf2975e42019f656c79ad1bcff922f77a87b476750d0dbce00fb035" Oct 09 00:28:20 crc kubenswrapper[4810]: I1009 00:28:20.789765 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-1-build" Oct 09 00:28:21 crc kubenswrapper[4810]: I1009 00:28:21.184342 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:28:21 crc kubenswrapper[4810]: I1009 00:28:21.184744 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:28:26 crc kubenswrapper[4810]: I1009 00:28:26.891598 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 09 00:28:26 crc kubenswrapper[4810]: I1009 00:28:26.901954 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-1-build"] Oct 09 00:28:27 crc kubenswrapper[4810]: I1009 00:28:27.268109 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" path="/var/lib/kubelet/pods/6038c31d-ccf3-4e00-aec1-a17021cd1b83/volumes" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.478715 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Oct 09 00:28:28 crc kubenswrapper[4810]: E1009 00:28:28.479715 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerName="docker-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.479732 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerName="docker-build" Oct 09 00:28:28 crc kubenswrapper[4810]: E1009 00:28:28.479765 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerName="manage-dockerfile" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.479775 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerName="manage-dockerfile" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.479913 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="6038c31d-ccf3-4e00-aec1-a17021cd1b83" containerName="docker-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.480893 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.487287 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-sys-config" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.487287 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-global-ca" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.487587 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-bundle-2-ca" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.487738 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.509129 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.534591 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.534642 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.534800 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.534949 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535015 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535067 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx5jd\" (UniqueName: 
\"kubernetes.io/projected/68cf26a7-a7d0-428d-991f-03be15bccd0c-kube-api-access-mx5jd\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535127 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535209 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535255 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535297 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535323 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.535345 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636682 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636766 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636800 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636850 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636875 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636938 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636961 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.636988 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.637017 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.637038 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.637065 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx5jd\" (UniqueName: \"kubernetes.io/projected/68cf26a7-a7d0-428d-991f-03be15bccd0c-kube-api-access-mx5jd\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.637089 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.637083 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-node-pullsecrets\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.637165 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildcachedir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.638771 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildworkdir\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.639265 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-system-configs\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.639293 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-root\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.639568 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-blob-cache\") pod \"service-telemetry-operator-bundle-2-build\" (UID: 
\"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.639634 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.639885 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-ca-bundles\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.639880 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-run\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.645509 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.654653 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-push\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.665861 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx5jd\" (UniqueName: \"kubernetes.io/projected/68cf26a7-a7d0-428d-991f-03be15bccd0c-kube-api-access-mx5jd\") pod \"service-telemetry-operator-bundle-2-build\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:28 crc kubenswrapper[4810]: I1009 00:28:28.809142 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:29 crc kubenswrapper[4810]: I1009 00:28:29.012807 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-bundle-2-build"] Oct 09 00:28:29 crc kubenswrapper[4810]: I1009 00:28:29.863657 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerStarted","Data":"a82cfd1e726d73ca41d0da65e13c57a015e7839f5dee55dca8004bbd780af37d"} Oct 09 00:28:29 crc kubenswrapper[4810]: I1009 00:28:29.864223 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerStarted","Data":"422d76c1f5025bb8e109f5df3adb4d1ebeb48416d21c383464fb69e2101ca3ac"} Oct 09 00:28:30 crc kubenswrapper[4810]: I1009 00:28:30.874559 4810 generic.go:334] "Generic (PLEG): container finished" podID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerID="a82cfd1e726d73ca41d0da65e13c57a015e7839f5dee55dca8004bbd780af37d" exitCode=0 Oct 09 00:28:30 crc kubenswrapper[4810]: I1009 00:28:30.874789 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerDied","Data":"a82cfd1e726d73ca41d0da65e13c57a015e7839f5dee55dca8004bbd780af37d"} Oct 09 00:28:31 crc kubenswrapper[4810]: I1009 00:28:31.887806 4810 generic.go:334] "Generic (PLEG): container finished" podID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerID="8b8f943ebda16a067eab3c9ad97acf78946591a548af691a4739f0a4eca4964c" exitCode=0 Oct 09 00:28:31 crc kubenswrapper[4810]: I1009 00:28:31.888106 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerDied","Data":"8b8f943ebda16a067eab3c9ad97acf78946591a548af691a4739f0a4eca4964c"} Oct 09 00:28:31 crc kubenswrapper[4810]: I1009 00:28:31.946058 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-bundle-2-build_68cf26a7-a7d0-428d-991f-03be15bccd0c/manage-dockerfile/0.log" Oct 09 00:28:32 crc kubenswrapper[4810]: I1009 00:28:32.899167 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerStarted","Data":"9dc8abcafb53a1db9fc565104496531957c3b51165bcf26e1e1e8b616b60f890"} Oct 09 00:28:32 crc kubenswrapper[4810]: I1009 00:28:32.934572 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-bundle-2-build" podStartSLOduration=4.934551555 podStartE2EDuration="4.934551555s" podCreationTimestamp="2025-10-09 00:28:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:28:32.929265863 +0000 UTC m=+1310.454904574" watchObservedRunningTime="2025-10-09 00:28:32.934551555 +0000 UTC m=+1310.460190296" Oct 09 00:28:35 crc kubenswrapper[4810]: I1009 00:28:35.926367 4810 generic.go:334] "Generic (PLEG): container finished" podID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerID="9dc8abcafb53a1db9fc565104496531957c3b51165bcf26e1e1e8b616b60f890" 
exitCode=0 Oct 09 00:28:35 crc kubenswrapper[4810]: I1009 00:28:35.926516 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerDied","Data":"9dc8abcafb53a1db9fc565104496531957c3b51165bcf26e1e1e8b616b60f890"} Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.206660 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259110 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-node-pullsecrets\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259180 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-push\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259226 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-system-configs\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259249 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-proxy-ca-bundles\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259297 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-pull\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259319 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildworkdir\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259340 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx5jd\" (UniqueName: \"kubernetes.io/projected/68cf26a7-a7d0-428d-991f-03be15bccd0c-kube-api-access-mx5jd\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259376 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-run\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 
00:28:37.259392 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-root\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259410 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-ca-bundles\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259448 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildcachedir\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.259465 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-blob-cache\") pod \"68cf26a7-a7d0-428d-991f-03be15bccd0c\" (UID: \"68cf26a7-a7d0-428d-991f-03be15bccd0c\") " Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.261030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.261098 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.262125 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.262294 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.262882 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). 
InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.262995 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.263552 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.263671 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.266895 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.267274 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.270013 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68cf26a7-a7d0-428d-991f-03be15bccd0c-kube-api-access-mx5jd" (OuterVolumeSpecName: "kube-api-access-mx5jd") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "kube-api-access-mx5jd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.274349 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "68cf26a7-a7d0-428d-991f-03be15bccd0c" (UID: "68cf26a7-a7d0-428d-991f-03be15bccd0c"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360396 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360447 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360468 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx5jd\" (UniqueName: \"kubernetes.io/projected/68cf26a7-a7d0-428d-991f-03be15bccd0c-kube-api-access-mx5jd\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360485 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360502 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360518 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360534 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360550 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360567 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/68cf26a7-a7d0-428d-991f-03be15bccd0c-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360583 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/68cf26a7-a7d0-428d-991f-03be15bccd0c-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360599 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.360616 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/68cf26a7-a7d0-428d-991f-03be15bccd0c-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.948206 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/service-telemetry-operator-bundle-2-build" event={"ID":"68cf26a7-a7d0-428d-991f-03be15bccd0c","Type":"ContainerDied","Data":"422d76c1f5025bb8e109f5df3adb4d1ebeb48416d21c383464fb69e2101ca3ac"} Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.948285 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="422d76c1f5025bb8e109f5df3adb4d1ebeb48416d21c383464fb69e2101ca3ac" Oct 09 00:28:37 crc kubenswrapper[4810]: I1009 00:28:37.948401 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-bundle-2-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.180347 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 09 00:28:41 crc kubenswrapper[4810]: E1009 00:28:41.181251 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="manage-dockerfile" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.181267 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="manage-dockerfile" Oct 09 00:28:41 crc kubenswrapper[4810]: E1009 00:28:41.181282 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="git-clone" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.181291 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="git-clone" Oct 09 00:28:41 crc kubenswrapper[4810]: E1009 00:28:41.181305 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="docker-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.181313 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="docker-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.181455 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="68cf26a7-a7d0-428d-991f-03be15bccd0c" containerName="docker-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.182255 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: W1009 00:28:41.184542 4810 reflector.go:561] object-"service-telemetry"/"smart-gateway-operator-bundle-1-sys-config": failed to list *v1.ConfigMap: configmaps "smart-gateway-operator-bundle-1-sys-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "service-telemetry": no relationship found between node 'crc' and this object Oct 09 00:28:41 crc kubenswrapper[4810]: E1009 00:28:41.184626 4810 reflector.go:158] "Unhandled Error" err="object-\"service-telemetry\"/\"smart-gateway-operator-bundle-1-sys-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"smart-gateway-operator-bundle-1-sys-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"service-telemetry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.184684 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:28:41 crc kubenswrapper[4810]: W1009 00:28:41.184950 4810 reflector.go:561] object-"service-telemetry"/"smart-gateway-operator-bundle-1-ca": failed to list *v1.ConfigMap: configmaps "smart-gateway-operator-bundle-1-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "service-telemetry": no relationship found between node 'crc' and this object Oct 09 00:28:41 crc kubenswrapper[4810]: E1009 00:28:41.184988 4810 reflector.go:158] "Unhandled Error" err="object-\"service-telemetry\"/\"smart-gateway-operator-bundle-1-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"smart-gateway-operator-bundle-1-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"service-telemetry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.186034 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-global-ca" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.210690 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.315677 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.315745 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316051 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316124 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316251 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316295 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316339 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316402 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316458 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316490 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptrc2\" (UniqueName: \"kubernetes.io/projected/911baddf-4619-4019-a86e-2b17f515b4c9-kube-api-access-ptrc2\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316514 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.316537 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417270 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417307 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417328 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417352 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417384 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417403 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptrc2\" (UniqueName: \"kubernetes.io/projected/911baddf-4619-4019-a86e-2b17f515b4c9-kube-api-access-ptrc2\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417424 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417451 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417480 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417503 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417534 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417547 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-buildcachedir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417618 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.417566 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-node-pullsecrets\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.418437 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-run\") pod \"smart-gateway-operator-bundle-1-build\" (UID: 
\"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.418839 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-root\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.418914 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-build-blob-cache\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.419581 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-buildworkdir\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.419633 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.422946 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.424605 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:41 crc kubenswrapper[4810]: I1009 00:28:41.438953 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptrc2\" (UniqueName: \"kubernetes.io/projected/911baddf-4619-4019-a86e-2b17f515b4c9-kube-api-access-ptrc2\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:42 crc kubenswrapper[4810]: I1009 00:28:42.353298 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-ca" Oct 09 00:28:42 crc kubenswrapper[4810]: I1009 00:28:42.359738 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-ca-bundles\") pod \"smart-gateway-operator-bundle-1-build\" (UID: 
\"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:42 crc kubenswrapper[4810]: E1009 00:28:42.418118 4810 configmap.go:193] Couldn't get configMap service-telemetry/smart-gateway-operator-bundle-1-sys-config: failed to sync configmap cache: timed out waiting for the condition Oct 09 00:28:42 crc kubenswrapper[4810]: E1009 00:28:42.418282 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs podName:911baddf-4619-4019-a86e-2b17f515b4c9 nodeName:}" failed. No retries permitted until 2025-10-09 00:28:42.918242203 +0000 UTC m=+1320.443880944 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "build-system-configs" (UniqueName: "kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs") pod "smart-gateway-operator-bundle-1-build" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9") : failed to sync configmap cache: timed out waiting for the condition Oct 09 00:28:42 crc kubenswrapper[4810]: I1009 00:28:42.423665 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-1-sys-config" Oct 09 00:28:42 crc kubenswrapper[4810]: I1009 00:28:42.940568 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:42 crc kubenswrapper[4810]: I1009 00:28:42.942174 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs\") pod \"smart-gateway-operator-bundle-1-build\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:43 crc kubenswrapper[4810]: I1009 00:28:43.006734 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:43 crc kubenswrapper[4810]: I1009 00:28:43.540872 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 09 00:28:43 crc kubenswrapper[4810]: I1009 00:28:43.994927 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"911baddf-4619-4019-a86e-2b17f515b4c9","Type":"ContainerStarted","Data":"8c2e9b3c8c1082617e1ec64b689149cf307672210b08c1c3446d24e913d31e86"} Oct 09 00:28:43 crc kubenswrapper[4810]: I1009 00:28:43.995196 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"911baddf-4619-4019-a86e-2b17f515b4c9","Type":"ContainerStarted","Data":"d3515e213237419c28af3a112f872b323fbdc24ec8cc8e039644d0f084308b02"} Oct 09 00:28:45 crc kubenswrapper[4810]: I1009 00:28:45.005211 4810 generic.go:334] "Generic (PLEG): container finished" podID="911baddf-4619-4019-a86e-2b17f515b4c9" containerID="8c2e9b3c8c1082617e1ec64b689149cf307672210b08c1c3446d24e913d31e86" exitCode=0 Oct 09 00:28:45 crc kubenswrapper[4810]: I1009 00:28:45.005282 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"911baddf-4619-4019-a86e-2b17f515b4c9","Type":"ContainerDied","Data":"8c2e9b3c8c1082617e1ec64b689149cf307672210b08c1c3446d24e913d31e86"} Oct 09 00:28:46 crc kubenswrapper[4810]: I1009 00:28:46.015827 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"911baddf-4619-4019-a86e-2b17f515b4c9","Type":"ContainerStarted","Data":"5d04ab197d1af30672995d00d7187a3f0313cb99e1c6b77e898566c0f981cc73"} Oct 09 00:28:46 crc kubenswrapper[4810]: I1009 00:28:46.045695 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-bundle-1-build" podStartSLOduration=5.045666899 podStartE2EDuration="5.045666899s" podCreationTimestamp="2025-10-09 00:28:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:28:46.043463815 +0000 UTC m=+1323.569102526" watchObservedRunningTime="2025-10-09 00:28:46.045666899 +0000 UTC m=+1323.571305640" Oct 09 00:28:47 crc kubenswrapper[4810]: I1009 00:28:47.025314 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_911baddf-4619-4019-a86e-2b17f515b4c9/docker-build/0.log" Oct 09 00:28:47 crc kubenswrapper[4810]: I1009 00:28:47.026168 4810 generic.go:334] "Generic (PLEG): container finished" podID="911baddf-4619-4019-a86e-2b17f515b4c9" containerID="5d04ab197d1af30672995d00d7187a3f0313cb99e1c6b77e898566c0f981cc73" exitCode=1 Oct 09 00:28:47 crc kubenswrapper[4810]: I1009 00:28:47.026225 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"911baddf-4619-4019-a86e-2b17f515b4c9","Type":"ContainerDied","Data":"5d04ab197d1af30672995d00d7187a3f0313cb99e1c6b77e898566c0f981cc73"} Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.321080 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_911baddf-4619-4019-a86e-2b17f515b4c9/docker-build/0.log" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 
00:28:48.321573 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482393 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-push\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482524 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptrc2\" (UniqueName: \"kubernetes.io/projected/911baddf-4619-4019-a86e-2b17f515b4c9-kube-api-access-ptrc2\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482646 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482710 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-proxy-ca-bundles\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482791 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-build-blob-cache\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482887 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-buildworkdir\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482937 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-pull\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.482994 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-buildcachedir\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483050 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-run\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483112 4810 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-ca-bundles\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483149 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-node-pullsecrets\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483191 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-root\") pod \"911baddf-4619-4019-a86e-2b17f515b4c9\" (UID: \"911baddf-4619-4019-a86e-2b17f515b4c9\") " Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483303 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483356 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483601 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483644 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483674 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.483672 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.484048 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.484095 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.484493 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.484575 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.486631 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.487869 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/911baddf-4619-4019-a86e-2b17f515b4c9-kube-api-access-ptrc2" (OuterVolumeSpecName: "kube-api-access-ptrc2") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "kube-api-access-ptrc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.488030 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.493029 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "911baddf-4619-4019-a86e-2b17f515b4c9" (UID: "911baddf-4619-4019-a86e-2b17f515b4c9"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.585579 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586006 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586043 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586067 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586095 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586116 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/911baddf-4619-4019-a86e-2b17f515b4c9-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586137 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/911baddf-4619-4019-a86e-2b17f515b4c9-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586159 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/911baddf-4619-4019-a86e-2b17f515b4c9-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586181 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/911baddf-4619-4019-a86e-2b17f515b4c9-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:48 crc kubenswrapper[4810]: I1009 00:28:48.586201 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptrc2\" (UniqueName: \"kubernetes.io/projected/911baddf-4619-4019-a86e-2b17f515b4c9-kube-api-access-ptrc2\") on node \"crc\" DevicePath \"\"" Oct 09 00:28:49 crc kubenswrapper[4810]: I1009 00:28:49.042563 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-1-build_911baddf-4619-4019-a86e-2b17f515b4c9/docker-build/0.log" Oct 09 00:28:49 crc kubenswrapper[4810]: I1009 00:28:49.043428 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-1-build" event={"ID":"911baddf-4619-4019-a86e-2b17f515b4c9","Type":"ContainerDied","Data":"d3515e213237419c28af3a112f872b323fbdc24ec8cc8e039644d0f084308b02"} Oct 09 00:28:49 crc kubenswrapper[4810]: I1009 00:28:49.043498 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3515e213237419c28af3a112f872b323fbdc24ec8cc8e039644d0f084308b02" Oct 09 00:28:49 crc kubenswrapper[4810]: I1009 00:28:49.043511 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-1-build" Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.185448 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.185536 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.185591 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.186524 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"debdd6b5cbff428344b7777ba4f55fefe79d94121774d2baf5531c75de88d838"} pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.186651 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://debdd6b5cbff428344b7777ba4f55fefe79d94121774d2baf5531c75de88d838" gracePeriod=600 Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.741236 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 09 00:28:51 crc kubenswrapper[4810]: I1009 00:28:51.745192 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-1-build"] Oct 09 00:28:52 crc kubenswrapper[4810]: I1009 00:28:52.067079 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="debdd6b5cbff428344b7777ba4f55fefe79d94121774d2baf5531c75de88d838" exitCode=0 Oct 09 00:28:52 crc kubenswrapper[4810]: I1009 00:28:52.067303 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" 
event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"debdd6b5cbff428344b7777ba4f55fefe79d94121774d2baf5531c75de88d838"} Oct 09 00:28:52 crc kubenswrapper[4810]: I1009 00:28:52.067384 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d"} Oct 09 00:28:52 crc kubenswrapper[4810]: I1009 00:28:52.067408 4810 scope.go:117] "RemoveContainer" containerID="70e29becead84dc56488e8fb810fb7212a994a5f2603ea5f084d6f1f12ab4086" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.264595 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="911baddf-4619-4019-a86e-2b17f515b4c9" path="/var/lib/kubelet/pods/911baddf-4619-4019-a86e-2b17f515b4c9/volumes" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.282458 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Oct 09 00:28:53 crc kubenswrapper[4810]: E1009 00:28:53.282680 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="911baddf-4619-4019-a86e-2b17f515b4c9" containerName="docker-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.282691 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="911baddf-4619-4019-a86e-2b17f515b4c9" containerName="docker-build" Oct 09 00:28:53 crc kubenswrapper[4810]: E1009 00:28:53.282712 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="911baddf-4619-4019-a86e-2b17f515b4c9" containerName="manage-dockerfile" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.282718 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="911baddf-4619-4019-a86e-2b17f515b4c9" containerName="manage-dockerfile" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.282810 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="911baddf-4619-4019-a86e-2b17f515b4c9" containerName="docker-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.283573 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.287642 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.290428 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-sys-config" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.291915 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-global-ca" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.293644 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-bundle-2-ca" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.318101 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.456387 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.456898 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.456939 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.456976 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg8dc\" (UniqueName: \"kubernetes.io/projected/3237268c-9962-4c5a-943e-93e656911821-kube-api-access-gg8dc\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457011 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457040 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-buildcachedir\") pod 
\"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457299 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457361 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457449 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457510 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457636 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.457773 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559607 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559648 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-root\") pod 
\"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559669 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559686 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559707 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559724 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559761 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559784 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559813 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559923 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg8dc\" (UniqueName: \"kubernetes.io/projected/3237268c-9962-4c5a-943e-93e656911821-kube-api-access-gg8dc\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " 
pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559940 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.559956 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.560021 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-buildcachedir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.560544 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-system-configs\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.560597 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-node-pullsecrets\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.560782 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-build-blob-cache\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.561002 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-buildworkdir\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.561594 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-run\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.561883 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-root\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.562025 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.562745 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-proxy-ca-bundles\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.572311 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-pull\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.572681 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-push\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.580970 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg8dc\" (UniqueName: \"kubernetes.io/projected/3237268c-9962-4c5a-943e-93e656911821-kube-api-access-gg8dc\") pod \"smart-gateway-operator-bundle-2-build\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.606567 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:28:53 crc kubenswrapper[4810]: I1009 00:28:53.885034 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bundle-2-build"] Oct 09 00:28:53 crc kubenswrapper[4810]: W1009 00:28:53.892322 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3237268c_9962_4c5a_943e_93e656911821.slice/crio-3ffdce0e40678688baf5706a76d1e0cddfc39997a1f4aa6a43df4f64594bce22 WatchSource:0}: Error finding container 3ffdce0e40678688baf5706a76d1e0cddfc39997a1f4aa6a43df4f64594bce22: Status 404 returned error can't find the container with id 3ffdce0e40678688baf5706a76d1e0cddfc39997a1f4aa6a43df4f64594bce22 Oct 09 00:28:54 crc kubenswrapper[4810]: I1009 00:28:54.088532 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerStarted","Data":"3ffdce0e40678688baf5706a76d1e0cddfc39997a1f4aa6a43df4f64594bce22"} Oct 09 00:28:55 crc kubenswrapper[4810]: I1009 00:28:55.099904 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerStarted","Data":"1782ee3e15b5c0419880db782779a3512571787d58d6e15d0540d5153eb634f1"} Oct 09 00:28:56 crc kubenswrapper[4810]: I1009 00:28:56.109972 4810 generic.go:334] "Generic (PLEG): container finished" podID="3237268c-9962-4c5a-943e-93e656911821" containerID="1782ee3e15b5c0419880db782779a3512571787d58d6e15d0540d5153eb634f1" exitCode=0 Oct 09 00:28:56 crc kubenswrapper[4810]: I1009 00:28:56.110046 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerDied","Data":"1782ee3e15b5c0419880db782779a3512571787d58d6e15d0540d5153eb634f1"} Oct 09 00:28:57 crc kubenswrapper[4810]: I1009 00:28:57.116594 4810 generic.go:334] "Generic (PLEG): container finished" podID="3237268c-9962-4c5a-943e-93e656911821" containerID="89a723075056bf3a210d06e55c9f42008cff3e6c22937ae5a5cf05d45905f255" exitCode=0 Oct 09 00:28:57 crc kubenswrapper[4810]: I1009 00:28:57.116676 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerDied","Data":"89a723075056bf3a210d06e55c9f42008cff3e6c22937ae5a5cf05d45905f255"} Oct 09 00:28:57 crc kubenswrapper[4810]: I1009 00:28:57.173974 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bundle-2-build_3237268c-9962-4c5a-943e-93e656911821/manage-dockerfile/0.log" Oct 09 00:28:58 crc kubenswrapper[4810]: I1009 00:28:58.127661 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerStarted","Data":"ebb68abaf47f7bfee07509fddb63f8431d7dcdd9e234a44b1577edb8a945ff52"} Oct 09 00:28:58 crc kubenswrapper[4810]: I1009 00:28:58.166156 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-bundle-2-build" podStartSLOduration=5.166137411 podStartE2EDuration="5.166137411s" podCreationTimestamp="2025-10-09 00:28:53 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:28:58.163571207 +0000 UTC m=+1335.689210008" watchObservedRunningTime="2025-10-09 00:28:58.166137411 +0000 UTC m=+1335.691776112" Oct 09 00:29:01 crc kubenswrapper[4810]: I1009 00:29:01.154702 4810 generic.go:334] "Generic (PLEG): container finished" podID="3237268c-9962-4c5a-943e-93e656911821" containerID="ebb68abaf47f7bfee07509fddb63f8431d7dcdd9e234a44b1577edb8a945ff52" exitCode=0 Oct 09 00:29:01 crc kubenswrapper[4810]: I1009 00:29:01.155243 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerDied","Data":"ebb68abaf47f7bfee07509fddb63f8431d7dcdd9e234a44b1577edb8a945ff52"} Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.495884 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583028 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-build-blob-cache\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583128 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-run\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583257 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-ca-bundles\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583319 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-proxy-ca-bundles\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583399 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-pull\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583448 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-buildcachedir\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583533 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-system-configs\") pod 
\"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583579 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-buildworkdir\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583624 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg8dc\" (UniqueName: \"kubernetes.io/projected/3237268c-9962-4c5a-943e-93e656911821-kube-api-access-gg8dc\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583642 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583677 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-node-pullsecrets\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583749 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-root\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.583804 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-push\") pod \"3237268c-9962-4c5a-943e-93e656911821\" (UID: \"3237268c-9962-4c5a-943e-93e656911821\") " Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.584323 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.584378 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.584452 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.584488 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.584766 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.585058 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.585094 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.585591 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.589951 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.590184 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.590735 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3237268c-9962-4c5a-943e-93e656911821-kube-api-access-gg8dc" (OuterVolumeSpecName: "kube-api-access-gg8dc") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "kube-api-access-gg8dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.596507 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "3237268c-9962-4c5a-943e-93e656911821" (UID: "3237268c-9962-4c5a-943e-93e656911821"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685488 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685535 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685558 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685576 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/3237268c-9962-4c5a-943e-93e656911821-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685592 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685607 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg8dc\" (UniqueName: \"kubernetes.io/projected/3237268c-9962-4c5a-943e-93e656911821-kube-api-access-gg8dc\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685621 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/3237268c-9962-4c5a-943e-93e656911821-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685635 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685650 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/3237268c-9962-4c5a-943e-93e656911821-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 
00:29:02.685663 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:02 crc kubenswrapper[4810]: I1009 00:29:02.685679 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/3237268c-9962-4c5a-943e-93e656911821-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:03 crc kubenswrapper[4810]: I1009 00:29:03.173677 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bundle-2-build" event={"ID":"3237268c-9962-4c5a-943e-93e656911821","Type":"ContainerDied","Data":"3ffdce0e40678688baf5706a76d1e0cddfc39997a1f4aa6a43df4f64594bce22"} Oct 09 00:29:03 crc kubenswrapper[4810]: I1009 00:29:03.173718 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ffdce0e40678688baf5706a76d1e0cddfc39997a1f4aa6a43df4f64594bce22" Oct 09 00:29:03 crc kubenswrapper[4810]: I1009 00:29:03.173781 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bundle-2-build" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.904903 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Oct 09 00:29:18 crc kubenswrapper[4810]: E1009 00:29:18.905682 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="git-clone" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.905695 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="git-clone" Oct 09 00:29:18 crc kubenswrapper[4810]: E1009 00:29:18.905718 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="manage-dockerfile" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.905726 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="manage-dockerfile" Oct 09 00:29:18 crc kubenswrapper[4810]: E1009 00:29:18.905740 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="docker-build" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.905750 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="docker-build" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.905904 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="3237268c-9962-4c5a-943e-93e656911821" containerName="docker-build" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.906867 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.910610 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-index-dockercfg" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.910910 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-ca" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.911756 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-sys-config" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.912544 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-98ntv" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.915592 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-global-ca" Oct 09 00:29:18 crc kubenswrapper[4810]: I1009 00:29:18.938553 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.045716 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.045773 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.045843 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.045881 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj4z5\" (UniqueName: \"kubernetes.io/projected/7832f2d8-4edc-45bf-961f-753eda1e7b0d-kube-api-access-dj4z5\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.045955 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: 
I1009 00:29:19.045988 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046015 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046124 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046190 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046226 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046261 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046283 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.046332 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: 
\"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147480 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147584 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147652 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147691 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj4z5\" (UniqueName: \"kubernetes.io/projected/7832f2d8-4edc-45bf-961f-753eda1e7b0d-kube-api-access-dj4z5\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147738 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147790 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147872 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147909 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.147944 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148084 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148092 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148094 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148329 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148223 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148388 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.148669 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.149110 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.149381 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.149485 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.149638 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.149701 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.150349 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.153961 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.154159 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 
00:29:19.156331 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.175306 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj4z5\" (UniqueName: \"kubernetes.io/projected/7832f2d8-4edc-45bf-961f-753eda1e7b0d-kube-api-access-dj4z5\") pod \"service-telemetry-framework-index-1-build\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.227446 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:19 crc kubenswrapper[4810]: I1009 00:29:19.436492 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Oct 09 00:29:20 crc kubenswrapper[4810]: I1009 00:29:20.314406 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerStarted","Data":"e1d6ad385967eb6dd2412d6dc9b3f8593fb482f44d3e10980d8189f72931546e"} Oct 09 00:29:20 crc kubenswrapper[4810]: I1009 00:29:20.316040 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerStarted","Data":"7bc25a3643178fe98fe0b57e2904709add5b62d62e7ad15ee8d5f99a6602f8db"} Oct 09 00:29:20 crc kubenswrapper[4810]: E1009 00:29:20.423362 4810 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.177:54856->38.102.83.177:46105: read tcp 38.102.83.177:54856->38.102.83.177:46105: read: connection reset by peer Oct 09 00:29:21 crc kubenswrapper[4810]: I1009 00:29:21.323565 4810 generic.go:334] "Generic (PLEG): container finished" podID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerID="e1d6ad385967eb6dd2412d6dc9b3f8593fb482f44d3e10980d8189f72931546e" exitCode=0 Oct 09 00:29:21 crc kubenswrapper[4810]: I1009 00:29:21.323628 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerDied","Data":"e1d6ad385967eb6dd2412d6dc9b3f8593fb482f44d3e10980d8189f72931546e"} Oct 09 00:29:22 crc kubenswrapper[4810]: I1009 00:29:22.335436 4810 generic.go:334] "Generic (PLEG): container finished" podID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerID="b038ed35fadadcb95feb05bd826e009c914234a30c0e034faf00e48a347eabc1" exitCode=0 Oct 09 00:29:22 crc kubenswrapper[4810]: I1009 00:29:22.335601 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerDied","Data":"b038ed35fadadcb95feb05bd826e009c914234a30c0e034faf00e48a347eabc1"} Oct 09 00:29:22 crc kubenswrapper[4810]: I1009 00:29:22.398547 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_7832f2d8-4edc-45bf-961f-753eda1e7b0d/manage-dockerfile/0.log" Oct 09 00:29:23 crc kubenswrapper[4810]: I1009 00:29:23.349785 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerStarted","Data":"c0bb32ebcc6df93c827f6e22f196e01b100e35577c4b625246a193993ea2b6fb"} Oct 09 00:29:23 crc kubenswrapper[4810]: I1009 00:29:23.393141 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-index-1-build" podStartSLOduration=5.393113773 podStartE2EDuration="5.393113773s" podCreationTimestamp="2025-10-09 00:29:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:29:23.391350472 +0000 UTC m=+1360.916989263" watchObservedRunningTime="2025-10-09 00:29:23.393113773 +0000 UTC m=+1360.918752514" Oct 09 00:29:53 crc kubenswrapper[4810]: I1009 00:29:53.568932 4810 generic.go:334] "Generic (PLEG): container finished" podID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerID="c0bb32ebcc6df93c827f6e22f196e01b100e35577c4b625246a193993ea2b6fb" exitCode=0 Oct 09 00:29:53 crc kubenswrapper[4810]: I1009 00:29:53.569042 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerDied","Data":"c0bb32ebcc6df93c827f6e22f196e01b100e35577c4b625246a193993ea2b6fb"} Oct 09 00:29:54 crc kubenswrapper[4810]: I1009 00:29:54.951601 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110171 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-system-configs\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110239 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-node-pullsecrets\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110279 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildworkdir\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110357 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-push\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110404 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: 
\"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110446 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-root\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110497 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildcachedir\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110544 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-ca-bundles\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110581 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj4z5\" (UniqueName: \"kubernetes.io/projected/7832f2d8-4edc-45bf-961f-753eda1e7b0d-kube-api-access-dj4z5\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110615 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-run\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110647 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-proxy-ca-bundles\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110722 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-pull\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.110761 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-blob-cache\") pod \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\" (UID: \"7832f2d8-4edc-45bf-961f-753eda1e7b0d\") " Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.112639 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "build-system-configs". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.112259 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.112707 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.113902 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.114673 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.115977 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.116160 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.118480 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-pull" (OuterVolumeSpecName: "builder-dockercfg-98ntv-pull") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "builder-dockercfg-98ntv-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.118784 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7832f2d8-4edc-45bf-961f-753eda1e7b0d-kube-api-access-dj4z5" (OuterVolumeSpecName: "kube-api-access-dj4z5") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "kube-api-access-dj4z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.119203 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-push" (OuterVolumeSpecName: "builder-dockercfg-98ntv-push") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "builder-dockercfg-98ntv-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.120461 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-service-telemetry-framework-index-dockercfg-user-build-volume" (OuterVolumeSpecName: "service-telemetry-framework-index-dockercfg-user-build-volume") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "service-telemetry-framework-index-dockercfg-user-build-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212463 4810 reconciler_common.go:293] "Volume detached for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-service-telemetry-framework-index-dockercfg-user-build-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212499 4810 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildcachedir\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212510 4810 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212520 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj4z5\" (UniqueName: \"kubernetes.io/projected/7832f2d8-4edc-45bf-961f-753eda1e7b0d-kube-api-access-dj4z5\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212531 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-run\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212540 4810 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212548 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-pull\" (UniqueName: 
\"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-pull\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212556 4810 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-system-configs\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212564 4810 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7832f2d8-4edc-45bf-961f-753eda1e7b0d-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212572 4810 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-buildworkdir\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.212580 4810 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-98ntv-push\" (UniqueName: \"kubernetes.io/secret/7832f2d8-4edc-45bf-961f-753eda1e7b0d-builder-dockercfg-98ntv-push\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.597134 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"7832f2d8-4edc-45bf-961f-753eda1e7b0d","Type":"ContainerDied","Data":"7bc25a3643178fe98fe0b57e2904709add5b62d62e7ad15ee8d5f99a6602f8db"} Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.597460 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7bc25a3643178fe98fe0b57e2904709add5b62d62e7ad15ee8d5f99a6602f8db" Oct 09 00:29:55 crc kubenswrapper[4810]: I1009 00:29:55.597254 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.001079 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.039217 4810 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-build-blob-cache\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.722961 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-zhqpm"] Oct 09 00:29:57 crc kubenswrapper[4810]: E1009 00:29:57.723337 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="git-clone" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.723363 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="git-clone" Oct 09 00:29:57 crc kubenswrapper[4810]: E1009 00:29:57.723391 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="manage-dockerfile" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.723404 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="manage-dockerfile" Oct 09 00:29:57 crc kubenswrapper[4810]: E1009 00:29:57.723433 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="docker-build" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.723445 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="docker-build" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.723620 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="7832f2d8-4edc-45bf-961f-753eda1e7b0d" containerName="docker-build" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.724427 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.727084 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"infrawatch-operators-dockercfg-q5c5v" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.730632 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-zhqpm"] Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.849339 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crjt5\" (UniqueName: \"kubernetes.io/projected/63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f-kube-api-access-crjt5\") pod \"infrawatch-operators-zhqpm\" (UID: \"63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f\") " pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.950311 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crjt5\" (UniqueName: \"kubernetes.io/projected/63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f-kube-api-access-crjt5\") pod \"infrawatch-operators-zhqpm\" (UID: \"63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f\") " pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:29:57 crc kubenswrapper[4810]: I1009 00:29:57.972255 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crjt5\" (UniqueName: \"kubernetes.io/projected/63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f-kube-api-access-crjt5\") pod \"infrawatch-operators-zhqpm\" (UID: \"63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f\") " pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:29:58 crc kubenswrapper[4810]: I1009 00:29:58.049043 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:29:58 crc kubenswrapper[4810]: I1009 00:29:58.113398 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "7832f2d8-4edc-45bf-961f-753eda1e7b0d" (UID: "7832f2d8-4edc-45bf-961f-753eda1e7b0d"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:29:58 crc kubenswrapper[4810]: I1009 00:29:58.152889 4810 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7832f2d8-4edc-45bf-961f-753eda1e7b0d-container-storage-root\") on node \"crc\" DevicePath \"\"" Oct 09 00:29:58 crc kubenswrapper[4810]: I1009 00:29:58.240500 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-zhqpm"] Oct 09 00:29:58 crc kubenswrapper[4810]: W1009 00:29:58.250891 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63ce67d3_9877_4e2f_a6d0_3d8d7130bc1f.slice/crio-06b89b256879949d193f05139358fbd572d7c0258ba8c8c42aa3d4fb91b71f7c WatchSource:0}: Error finding container 06b89b256879949d193f05139358fbd572d7c0258ba8c8c42aa3d4fb91b71f7c: Status 404 returned error can't find the container with id 06b89b256879949d193f05139358fbd572d7c0258ba8c8c42aa3d4fb91b71f7c Oct 09 00:29:58 crc kubenswrapper[4810]: I1009 00:29:58.253105 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 00:29:58 crc kubenswrapper[4810]: I1009 00:29:58.616303 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-zhqpm" event={"ID":"63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f","Type":"ContainerStarted","Data":"06b89b256879949d193f05139358fbd572d7c0258ba8c8c42aa3d4fb91b71f7c"} Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.124360 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v"] Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.125184 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.129590 4810 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.131943 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.135009 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v"] Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.288743 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzvgj\" (UniqueName: \"kubernetes.io/projected/77019956-59e7-410f-83fa-c14d8008875e-kube-api-access-nzvgj\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.288805 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77019956-59e7-410f-83fa-c14d8008875e-config-volume\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.288845 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77019956-59e7-410f-83fa-c14d8008875e-secret-volume\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.390148 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77019956-59e7-410f-83fa-c14d8008875e-config-volume\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.390470 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77019956-59e7-410f-83fa-c14d8008875e-secret-volume\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.390548 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzvgj\" (UniqueName: \"kubernetes.io/projected/77019956-59e7-410f-83fa-c14d8008875e-kube-api-access-nzvgj\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.392596 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77019956-59e7-410f-83fa-c14d8008875e-config-volume\") pod 
\"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.397371 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77019956-59e7-410f-83fa-c14d8008875e-secret-volume\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.405713 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzvgj\" (UniqueName: \"kubernetes.io/projected/77019956-59e7-410f-83fa-c14d8008875e-kube-api-access-nzvgj\") pod \"collect-profiles-29332830-qpm7v\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:00 crc kubenswrapper[4810]: I1009 00:30:00.456683 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:08 crc kubenswrapper[4810]: I1009 00:30:08.690238 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v"] Oct 09 00:30:08 crc kubenswrapper[4810]: W1009 00:30:08.705653 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77019956_59e7_410f_83fa_c14d8008875e.slice/crio-29e5dae0de08692e26ff48faa331d4cc5cc0a079daee047f0d563ba30fe8ef6b WatchSource:0}: Error finding container 29e5dae0de08692e26ff48faa331d4cc5cc0a079daee047f0d563ba30fe8ef6b: Status 404 returned error can't find the container with id 29e5dae0de08692e26ff48faa331d4cc5cc0a079daee047f0d563ba30fe8ef6b Oct 09 00:30:09 crc kubenswrapper[4810]: I1009 00:30:09.688613 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-zhqpm" event={"ID":"63ce67d3-9877-4e2f-a6d0-3d8d7130bc1f","Type":"ContainerStarted","Data":"d63ccb6af67313fd3d034b4093683b90b6f960a2764925590eae7662583033df"} Oct 09 00:30:09 crc kubenswrapper[4810]: I1009 00:30:09.691679 4810 generic.go:334] "Generic (PLEG): container finished" podID="77019956-59e7-410f-83fa-c14d8008875e" containerID="6a2d271a35c2c1abf3600ebced16b45fb9ce8709ab03c48c83340acb72aad54c" exitCode=0 Oct 09 00:30:09 crc kubenswrapper[4810]: I1009 00:30:09.691745 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" event={"ID":"77019956-59e7-410f-83fa-c14d8008875e","Type":"ContainerDied","Data":"6a2d271a35c2c1abf3600ebced16b45fb9ce8709ab03c48c83340acb72aad54c"} Oct 09 00:30:09 crc kubenswrapper[4810]: I1009 00:30:09.691776 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" event={"ID":"77019956-59e7-410f-83fa-c14d8008875e","Type":"ContainerStarted","Data":"29e5dae0de08692e26ff48faa331d4cc5cc0a079daee047f0d563ba30fe8ef6b"} Oct 09 00:30:09 crc kubenswrapper[4810]: I1009 00:30:09.707689 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-zhqpm" podStartSLOduration=2.366027351 podStartE2EDuration="12.707660384s" podCreationTimestamp="2025-10-09 00:29:57 +0000 UTC" firstStartedPulling="2025-10-09 
00:29:58.252920888 +0000 UTC m=+1395.778559589" lastFinishedPulling="2025-10-09 00:30:08.594553921 +0000 UTC m=+1406.120192622" observedRunningTime="2025-10-09 00:30:09.705288645 +0000 UTC m=+1407.230927406" watchObservedRunningTime="2025-10-09 00:30:09.707660384 +0000 UTC m=+1407.233299095" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.068581 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.231540 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77019956-59e7-410f-83fa-c14d8008875e-secret-volume\") pod \"77019956-59e7-410f-83fa-c14d8008875e\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.231655 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77019956-59e7-410f-83fa-c14d8008875e-config-volume\") pod \"77019956-59e7-410f-83fa-c14d8008875e\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.231748 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzvgj\" (UniqueName: \"kubernetes.io/projected/77019956-59e7-410f-83fa-c14d8008875e-kube-api-access-nzvgj\") pod \"77019956-59e7-410f-83fa-c14d8008875e\" (UID: \"77019956-59e7-410f-83fa-c14d8008875e\") " Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.232958 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77019956-59e7-410f-83fa-c14d8008875e-config-volume" (OuterVolumeSpecName: "config-volume") pod "77019956-59e7-410f-83fa-c14d8008875e" (UID: "77019956-59e7-410f-83fa-c14d8008875e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.239405 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77019956-59e7-410f-83fa-c14d8008875e-kube-api-access-nzvgj" (OuterVolumeSpecName: "kube-api-access-nzvgj") pod "77019956-59e7-410f-83fa-c14d8008875e" (UID: "77019956-59e7-410f-83fa-c14d8008875e"). InnerVolumeSpecName "kube-api-access-nzvgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.239782 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77019956-59e7-410f-83fa-c14d8008875e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "77019956-59e7-410f-83fa-c14d8008875e" (UID: "77019956-59e7-410f-83fa-c14d8008875e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.333141 4810 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/77019956-59e7-410f-83fa-c14d8008875e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.333208 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzvgj\" (UniqueName: \"kubernetes.io/projected/77019956-59e7-410f-83fa-c14d8008875e-kube-api-access-nzvgj\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.333230 4810 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/77019956-59e7-410f-83fa-c14d8008875e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.714688 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" event={"ID":"77019956-59e7-410f-83fa-c14d8008875e","Type":"ContainerDied","Data":"29e5dae0de08692e26ff48faa331d4cc5cc0a079daee047f0d563ba30fe8ef6b"} Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.714769 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29e5dae0de08692e26ff48faa331d4cc5cc0a079daee047f0d563ba30fe8ef6b" Oct 09 00:30:11 crc kubenswrapper[4810]: I1009 00:30:11.714794 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29332830-qpm7v" Oct 09 00:30:18 crc kubenswrapper[4810]: I1009 00:30:18.049879 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:30:18 crc kubenswrapper[4810]: I1009 00:30:18.050456 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:30:18 crc kubenswrapper[4810]: I1009 00:30:18.086690 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:30:18 crc kubenswrapper[4810]: I1009 00:30:18.816568 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-zhqpm" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.818168 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7"] Oct 09 00:30:30 crc kubenswrapper[4810]: E1009 00:30:30.819097 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77019956-59e7-410f-83fa-c14d8008875e" containerName="collect-profiles" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.819119 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="77019956-59e7-410f-83fa-c14d8008875e" containerName="collect-profiles" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.819376 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="77019956-59e7-410f-83fa-c14d8008875e" containerName="collect-profiles" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.820901 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.831010 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7"] Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.914545 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.914649 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65btm\" (UniqueName: \"kubernetes.io/projected/c8b0edf7-985c-40b1-8fa3-463677ea9b96-kube-api-access-65btm\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:30 crc kubenswrapper[4810]: I1009 00:30:30.914688 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.017244 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65btm\" (UniqueName: \"kubernetes.io/projected/c8b0edf7-985c-40b1-8fa3-463677ea9b96-kube-api-access-65btm\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.017342 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.017505 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.019960 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-bundle\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " 
pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.028182 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-util\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.056171 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65btm\" (UniqueName: \"kubernetes.io/projected/c8b0edf7-985c-40b1-8fa3-463677ea9b96-kube-api-access-65btm\") pod \"500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.144430 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.339958 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7"] Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.611805 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n"] Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.614399 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.619001 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n"] Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.730032 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqmml\" (UniqueName: \"kubernetes.io/projected/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-kube-api-access-rqmml\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.730106 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.730258 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 
00:30:31.831298 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqmml\" (UniqueName: \"kubernetes.io/projected/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-kube-api-access-rqmml\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.831357 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.831385 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.831787 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-bundle\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.832339 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-util\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.853813 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqmml\" (UniqueName: \"kubernetes.io/projected/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-kube-api-access-rqmml\") pod \"372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.878170 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" event={"ID":"c8b0edf7-985c-40b1-8fa3-463677ea9b96","Type":"ContainerStarted","Data":"067828f3d37d075443dd2d1166a0036cced60e07a48c0c3444f089e1063f4475"} Oct 09 00:30:31 crc kubenswrapper[4810]: I1009 00:30:31.930199 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:32 crc kubenswrapper[4810]: I1009 00:30:32.171508 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n"] Oct 09 00:30:32 crc kubenswrapper[4810]: W1009 00:30:32.184375 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1dd20b82_5e61_4b7a_a4b1_14b83ec4ee79.slice/crio-2c311adedbab720d5d13cb2fddd14456366d84794fa0d93e88d7ebebcf348cb9 WatchSource:0}: Error finding container 2c311adedbab720d5d13cb2fddd14456366d84794fa0d93e88d7ebebcf348cb9: Status 404 returned error can't find the container with id 2c311adedbab720d5d13cb2fddd14456366d84794fa0d93e88d7ebebcf348cb9 Oct 09 00:30:32 crc kubenswrapper[4810]: I1009 00:30:32.885184 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" event={"ID":"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79","Type":"ContainerStarted","Data":"2c311adedbab720d5d13cb2fddd14456366d84794fa0d93e88d7ebebcf348cb9"} Oct 09 00:30:34 crc kubenswrapper[4810]: I1009 00:30:34.897510 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" event={"ID":"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79","Type":"ContainerStarted","Data":"9e8ffdaae71773c56ec8d64c976861cf5b4c681517c4ebbc4b72a035ea539281"} Oct 09 00:30:34 crc kubenswrapper[4810]: I1009 00:30:34.898977 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" event={"ID":"c8b0edf7-985c-40b1-8fa3-463677ea9b96","Type":"ContainerStarted","Data":"5aa2540143c88b25d364c72882a234acbe6a6438bf170c4f8028aaec770bec2f"} Oct 09 00:30:35 crc kubenswrapper[4810]: I1009 00:30:35.910785 4810 generic.go:334] "Generic (PLEG): container finished" podID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerID="5aa2540143c88b25d364c72882a234acbe6a6438bf170c4f8028aaec770bec2f" exitCode=0 Oct 09 00:30:35 crc kubenswrapper[4810]: I1009 00:30:35.910892 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" event={"ID":"c8b0edf7-985c-40b1-8fa3-463677ea9b96","Type":"ContainerDied","Data":"5aa2540143c88b25d364c72882a234acbe6a6438bf170c4f8028aaec770bec2f"} Oct 09 00:30:35 crc kubenswrapper[4810]: I1009 00:30:35.922309 4810 generic.go:334] "Generic (PLEG): container finished" podID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerID="9e8ffdaae71773c56ec8d64c976861cf5b4c681517c4ebbc4b72a035ea539281" exitCode=0 Oct 09 00:30:35 crc kubenswrapper[4810]: I1009 00:30:35.922387 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" event={"ID":"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79","Type":"ContainerDied","Data":"9e8ffdaae71773c56ec8d64c976861cf5b4c681517c4ebbc4b72a035ea539281"} Oct 09 00:30:36 crc kubenswrapper[4810]: I1009 00:30:36.938247 4810 generic.go:334] "Generic (PLEG): container finished" podID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerID="4ddf33269134aa3c80f723d99f8b101cd1971b1050aaf8e8eca62dc96929e14f" exitCode=0 Oct 09 00:30:36 crc kubenswrapper[4810]: I1009 00:30:36.938366 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" event={"ID":"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79","Type":"ContainerDied","Data":"4ddf33269134aa3c80f723d99f8b101cd1971b1050aaf8e8eca62dc96929e14f"} Oct 09 00:30:36 crc kubenswrapper[4810]: I1009 00:30:36.943728 4810 generic.go:334] "Generic (PLEG): container finished" podID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerID="7866a2378594d3176e84f1e63f855ec8822cf7f6fb62bb57e1d5789c2f4a96cf" exitCode=0 Oct 09 00:30:36 crc kubenswrapper[4810]: I1009 00:30:36.943925 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" event={"ID":"c8b0edf7-985c-40b1-8fa3-463677ea9b96","Type":"ContainerDied","Data":"7866a2378594d3176e84f1e63f855ec8822cf7f6fb62bb57e1d5789c2f4a96cf"} Oct 09 00:30:37 crc kubenswrapper[4810]: I1009 00:30:37.958424 4810 generic.go:334] "Generic (PLEG): container finished" podID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerID="518b8018e5f3a491086019b7eeee0471ad88b5e6182dfc4d9ca093c52560fc66" exitCode=0 Oct 09 00:30:37 crc kubenswrapper[4810]: I1009 00:30:37.958544 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" event={"ID":"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79","Type":"ContainerDied","Data":"518b8018e5f3a491086019b7eeee0471ad88b5e6182dfc4d9ca093c52560fc66"} Oct 09 00:30:37 crc kubenswrapper[4810]: I1009 00:30:37.965212 4810 generic.go:334] "Generic (PLEG): container finished" podID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerID="03c48c2bfdf14254c220e0ba6a9b44756c584628d9631b2f9ea32d9e5ab102e9" exitCode=0 Oct 09 00:30:37 crc kubenswrapper[4810]: I1009 00:30:37.965291 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" event={"ID":"c8b0edf7-985c-40b1-8fa3-463677ea9b96","Type":"ContainerDied","Data":"03c48c2bfdf14254c220e0ba6a9b44756c584628d9631b2f9ea32d9e5ab102e9"} Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.308346 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.314787 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.484839 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqmml\" (UniqueName: \"kubernetes.io/projected/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-kube-api-access-rqmml\") pod \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.484898 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65btm\" (UniqueName: \"kubernetes.io/projected/c8b0edf7-985c-40b1-8fa3-463677ea9b96-kube-api-access-65btm\") pod \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.484937 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-bundle\") pod \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.484963 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-bundle\") pod \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.484989 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-util\") pod \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\" (UID: \"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79\") " Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.485052 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-util\") pod \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\" (UID: \"c8b0edf7-985c-40b1-8fa3-463677ea9b96\") " Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.485977 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-bundle" (OuterVolumeSpecName: "bundle") pod "1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" (UID: "1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.486084 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-bundle" (OuterVolumeSpecName: "bundle") pod "c8b0edf7-985c-40b1-8fa3-463677ea9b96" (UID: "c8b0edf7-985c-40b1-8fa3-463677ea9b96"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.493397 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-kube-api-access-rqmml" (OuterVolumeSpecName: "kube-api-access-rqmml") pod "1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" (UID: "1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79"). InnerVolumeSpecName "kube-api-access-rqmml". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.499149 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8b0edf7-985c-40b1-8fa3-463677ea9b96-kube-api-access-65btm" (OuterVolumeSpecName: "kube-api-access-65btm") pod "c8b0edf7-985c-40b1-8fa3-463677ea9b96" (UID: "c8b0edf7-985c-40b1-8fa3-463677ea9b96"). InnerVolumeSpecName "kube-api-access-65btm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.522645 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-util" (OuterVolumeSpecName: "util") pod "c8b0edf7-985c-40b1-8fa3-463677ea9b96" (UID: "c8b0edf7-985c-40b1-8fa3-463677ea9b96"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.524252 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-util" (OuterVolumeSpecName: "util") pod "1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" (UID: "1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.586718 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqmml\" (UniqueName: \"kubernetes.io/projected/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-kube-api-access-rqmml\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.586761 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65btm\" (UniqueName: \"kubernetes.io/projected/c8b0edf7-985c-40b1-8fa3-463677ea9b96-kube-api-access-65btm\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.586779 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.586794 4810 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.586809 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79-util\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.586843 4810 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c8b0edf7-985c-40b1-8fa3-463677ea9b96-util\") on node \"crc\" DevicePath \"\"" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.982975 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" event={"ID":"1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79","Type":"ContainerDied","Data":"2c311adedbab720d5d13cb2fddd14456366d84794fa0d93e88d7ebebcf348cb9"} Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.983017 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c311adedbab720d5d13cb2fddd14456366d84794fa0d93e88d7ebebcf348cb9" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.982996 4810 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/372e7d5daac88c2e9a91443a2f508c8c20ad57bc41b1606ec960d61c0956p6n" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.985520 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" event={"ID":"c8b0edf7-985c-40b1-8fa3-463677ea9b96","Type":"ContainerDied","Data":"067828f3d37d075443dd2d1166a0036cced60e07a48c0c3444f089e1063f4475"} Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.985554 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/500c4f010310dad14c569d8fa2124fef1cf701af50ed1128cec4daf65as4jc7" Oct 09 00:30:39 crc kubenswrapper[4810]: I1009 00:30:39.985555 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="067828f3d37d075443dd2d1166a0036cced60e07a48c0c3444f089e1063f4475" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.313435 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r"] Oct 09 00:30:43 crc kubenswrapper[4810]: E1009 00:30:43.314103 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="pull" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314125 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="pull" Oct 09 00:30:43 crc kubenswrapper[4810]: E1009 00:30:43.314142 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="extract" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314156 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="extract" Oct 09 00:30:43 crc kubenswrapper[4810]: E1009 00:30:43.314169 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="extract" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314179 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="extract" Oct 09 00:30:43 crc kubenswrapper[4810]: E1009 00:30:43.314194 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="util" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314205 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="util" Oct 09 00:30:43 crc kubenswrapper[4810]: E1009 00:30:43.314374 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="pull" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314384 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="pull" Oct 09 00:30:43 crc kubenswrapper[4810]: E1009 00:30:43.314417 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="util" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314427 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="util" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314592 4810 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1dd20b82-5e61-4b7a-a4b1-14b83ec4ee79" containerName="extract" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.314611 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8b0edf7-985c-40b1-8fa3-463677ea9b96" containerName="extract" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.315269 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.316737 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-fkjj8" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.336230 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/09e03be9-1c87-4bbc-92ac-5f1de2289095-runner\") pod \"smart-gateway-operator-55b56d7ff-xqq2r\" (UID: \"09e03be9-1c87-4bbc-92ac-5f1de2289095\") " pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.336316 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csf5z\" (UniqueName: \"kubernetes.io/projected/09e03be9-1c87-4bbc-92ac-5f1de2289095-kube-api-access-csf5z\") pod \"smart-gateway-operator-55b56d7ff-xqq2r\" (UID: \"09e03be9-1c87-4bbc-92ac-5f1de2289095\") " pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.340256 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r"] Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.437853 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/09e03be9-1c87-4bbc-92ac-5f1de2289095-runner\") pod \"smart-gateway-operator-55b56d7ff-xqq2r\" (UID: \"09e03be9-1c87-4bbc-92ac-5f1de2289095\") " pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.437991 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csf5z\" (UniqueName: \"kubernetes.io/projected/09e03be9-1c87-4bbc-92ac-5f1de2289095-kube-api-access-csf5z\") pod \"smart-gateway-operator-55b56d7ff-xqq2r\" (UID: \"09e03be9-1c87-4bbc-92ac-5f1de2289095\") " pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.438622 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/09e03be9-1c87-4bbc-92ac-5f1de2289095-runner\") pod \"smart-gateway-operator-55b56d7ff-xqq2r\" (UID: \"09e03be9-1c87-4bbc-92ac-5f1de2289095\") " pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.462467 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csf5z\" (UniqueName: \"kubernetes.io/projected/09e03be9-1c87-4bbc-92ac-5f1de2289095-kube-api-access-csf5z\") pod \"smart-gateway-operator-55b56d7ff-xqq2r\" (UID: \"09e03be9-1c87-4bbc-92ac-5f1de2289095\") " pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:43 crc kubenswrapper[4810]: I1009 00:30:43.636955 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" Oct 09 00:30:44 crc kubenswrapper[4810]: I1009 00:30:44.119666 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r"] Oct 09 00:30:44 crc kubenswrapper[4810]: W1009 00:30:44.125307 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09e03be9_1c87_4bbc_92ac_5f1de2289095.slice/crio-c0bfcb2d5df0bba0aaab2d1a45c3de6534243f5286b4c04df6775896f93a6186 WatchSource:0}: Error finding container c0bfcb2d5df0bba0aaab2d1a45c3de6534243f5286b4c04df6775896f93a6186: Status 404 returned error can't find the container with id c0bfcb2d5df0bba0aaab2d1a45c3de6534243f5286b4c04df6775896f93a6186 Oct 09 00:30:45 crc kubenswrapper[4810]: I1009 00:30:45.019790 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" event={"ID":"09e03be9-1c87-4bbc-92ac-5f1de2289095","Type":"ContainerStarted","Data":"c0bfcb2d5df0bba0aaab2d1a45c3de6534243f5286b4c04df6775896f93a6186"} Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.565386 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-99856677b-nt8cc"] Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.566838 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.569169 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-ptqmb" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.582014 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-99856677b-nt8cc"] Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.689524 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z9vg\" (UniqueName: \"kubernetes.io/projected/1278b5d5-8f0f-4b50-9984-6e2bdbea7480-kube-api-access-8z9vg\") pod \"service-telemetry-operator-99856677b-nt8cc\" (UID: \"1278b5d5-8f0f-4b50-9984-6e2bdbea7480\") " pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.689633 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/1278b5d5-8f0f-4b50-9984-6e2bdbea7480-runner\") pod \"service-telemetry-operator-99856677b-nt8cc\" (UID: \"1278b5d5-8f0f-4b50-9984-6e2bdbea7480\") " pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.790755 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/1278b5d5-8f0f-4b50-9984-6e2bdbea7480-runner\") pod \"service-telemetry-operator-99856677b-nt8cc\" (UID: \"1278b5d5-8f0f-4b50-9984-6e2bdbea7480\") " pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.790856 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z9vg\" (UniqueName: \"kubernetes.io/projected/1278b5d5-8f0f-4b50-9984-6e2bdbea7480-kube-api-access-8z9vg\") pod \"service-telemetry-operator-99856677b-nt8cc\" (UID: 
\"1278b5d5-8f0f-4b50-9984-6e2bdbea7480\") " pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.791256 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/1278b5d5-8f0f-4b50-9984-6e2bdbea7480-runner\") pod \"service-telemetry-operator-99856677b-nt8cc\" (UID: \"1278b5d5-8f0f-4b50-9984-6e2bdbea7480\") " pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.807279 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z9vg\" (UniqueName: \"kubernetes.io/projected/1278b5d5-8f0f-4b50-9984-6e2bdbea7480-kube-api-access-8z9vg\") pod \"service-telemetry-operator-99856677b-nt8cc\" (UID: \"1278b5d5-8f0f-4b50-9984-6e2bdbea7480\") " pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:46 crc kubenswrapper[4810]: I1009 00:30:46.887991 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" Oct 09 00:30:47 crc kubenswrapper[4810]: I1009 00:30:47.113995 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-99856677b-nt8cc"] Oct 09 00:30:47 crc kubenswrapper[4810]: W1009 00:30:47.119033 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1278b5d5_8f0f_4b50_9984_6e2bdbea7480.slice/crio-e15714b77f6ebbc10465a21524ac7d1e762699717e9aa8b7ae6462a4ecab7ef9 WatchSource:0}: Error finding container e15714b77f6ebbc10465a21524ac7d1e762699717e9aa8b7ae6462a4ecab7ef9: Status 404 returned error can't find the container with id e15714b77f6ebbc10465a21524ac7d1e762699717e9aa8b7ae6462a4ecab7ef9 Oct 09 00:30:48 crc kubenswrapper[4810]: I1009 00:30:48.054360 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" event={"ID":"1278b5d5-8f0f-4b50-9984-6e2bdbea7480","Type":"ContainerStarted","Data":"e15714b77f6ebbc10465a21524ac7d1e762699717e9aa8b7ae6462a4ecab7ef9"} Oct 09 00:30:51 crc kubenswrapper[4810]: I1009 00:30:51.184710 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:30:51 crc kubenswrapper[4810]: I1009 00:30:51.185343 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:30:58 crc kubenswrapper[4810]: E1009 00:30:58.914316 4810 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Oct 09 00:30:58 crc kubenswrapper[4810]: E1009 00:30:58.914734 4810 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1759969688,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-csf5z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-55b56d7ff-xqq2r_service-telemetry(09e03be9-1c87-4bbc-92ac-5f1de2289095): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 00:30:58 crc kubenswrapper[4810]: E1009 00:30:58.915892 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" podUID="09e03be9-1c87-4bbc-92ac-5f1de2289095" Oct 09 00:30:59 crc kubenswrapper[4810]: E1009 00:30:59.139344 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" podUID="09e03be9-1c87-4bbc-92ac-5f1de2289095" Oct 09 00:31:03 crc kubenswrapper[4810]: I1009 00:31:03.189007 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" event={"ID":"1278b5d5-8f0f-4b50-9984-6e2bdbea7480","Type":"ContainerStarted","Data":"03b4fbce1fe27060ba97be2b108b30074f08c7cd2d6f782cf3ab8832f3bf41a2"} Oct 09 00:31:03 crc kubenswrapper[4810]: I1009 00:31:03.214002 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-99856677b-nt8cc" podStartSLOduration=2.053757865 podStartE2EDuration="17.213969598s" podCreationTimestamp="2025-10-09 00:30:46 +0000 UTC" firstStartedPulling="2025-10-09 00:30:47.121226836 +0000 UTC m=+1444.646865527" lastFinishedPulling="2025-10-09 00:31:02.281438569 +0000 UTC m=+1459.807077260" observedRunningTime="2025-10-09 00:31:03.210763915 +0000 UTC m=+1460.736402657" watchObservedRunningTime="2025-10-09 00:31:03.213969598 +0000 UTC m=+1460.739608339" Oct 09 00:31:13 crc kubenswrapper[4810]: I1009 00:31:13.265919 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" event={"ID":"09e03be9-1c87-4bbc-92ac-5f1de2289095","Type":"ContainerStarted","Data":"09ad4e244d34536028bcd98c4eec96cc9ac67293cb9e6a1191aa693162bfdefa"} Oct 09 00:31:13 crc kubenswrapper[4810]: I1009 00:31:13.282710 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-55b56d7ff-xqq2r" podStartSLOduration=1.602431502 podStartE2EDuration="30.282688555s" podCreationTimestamp="2025-10-09 00:30:43 +0000 UTC" firstStartedPulling="2025-10-09 00:30:44.12792826 +0000 UTC m=+1441.653566961" lastFinishedPulling="2025-10-09 00:31:12.808185273 +0000 UTC m=+1470.333824014" observedRunningTime="2025-10-09 00:31:13.281227423 +0000 UTC m=+1470.806866134" watchObservedRunningTime="2025-10-09 00:31:13.282688555 +0000 UTC m=+1470.808327256" Oct 09 00:31:21 crc kubenswrapper[4810]: I1009 00:31:21.184774 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:31:21 crc kubenswrapper[4810]: I1009 00:31:21.185439 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.009121 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-cn5ld"] Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.011166 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.014575 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.016587 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.016657 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.016894 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.017132 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-79bdl" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.017201 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.018097 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.036873 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-cn5ld"] Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.211807 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.211890 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg2bx\" (UniqueName: \"kubernetes.io/projected/a53e61eb-d1f3-4467-abe6-a25041614849-kube-api-access-rg2bx\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.211948 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.211985 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-config\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.212027 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-users\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.212061 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.212168 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.313907 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.314343 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.314634 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg2bx\" (UniqueName: \"kubernetes.io/projected/a53e61eb-d1f3-4467-abe6-a25041614849-kube-api-access-rg2bx\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.314979 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.315086 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-config\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc 
kubenswrapper[4810]: I1009 00:31:29.315193 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-users\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.315288 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.317043 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-config\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.322693 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.323518 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.325236 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-users\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.336839 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.338731 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.348666 4810 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rg2bx\" (UniqueName: \"kubernetes.io/projected/a53e61eb-d1f3-4467-abe6-a25041614849-kube-api-access-rg2bx\") pod \"default-interconnect-68864d46cb-cn5ld\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:29 crc kubenswrapper[4810]: I1009 00:31:29.636710 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:31:30 crc kubenswrapper[4810]: I1009 00:31:30.092469 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-cn5ld"] Oct 09 00:31:30 crc kubenswrapper[4810]: I1009 00:31:30.389521 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" event={"ID":"a53e61eb-d1f3-4467-abe6-a25041614849","Type":"ContainerStarted","Data":"8b28ccfc14478bc3f7e4457fd2cdadce9db703df42a86d130fa648ce06cf56a6"} Oct 09 00:31:36 crc kubenswrapper[4810]: I1009 00:31:36.427850 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" event={"ID":"a53e61eb-d1f3-4467-abe6-a25041614849","Type":"ContainerStarted","Data":"840a76de9ec4b37b0cb4d9f925825f096126b204d3279b09f60291d063716738"} Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.963993 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" podStartSLOduration=5.336177817 podStartE2EDuration="10.96395811s" podCreationTimestamp="2025-10-09 00:31:28 +0000 UTC" firstStartedPulling="2025-10-09 00:31:30.10407633 +0000 UTC m=+1487.629715081" lastFinishedPulling="2025-10-09 00:31:35.731856633 +0000 UTC m=+1493.257495374" observedRunningTime="2025-10-09 00:31:36.455208095 +0000 UTC m=+1493.980846816" watchObservedRunningTime="2025-10-09 00:31:38.96395811 +0000 UTC m=+1496.489596851" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.972238 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.975474 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.978605 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.979114 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.979613 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.981775 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.981878 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.982766 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-nwkx6" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.983183 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Oct 09 00:31:38 crc kubenswrapper[4810]: I1009 00:31:38.983320 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.003256 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153312 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153361 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9020904-a94e-479d-a497-75a587aed860-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153422 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153542 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f9020904-a94e-479d-a497-75a587aed860-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153661 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-6kvqf\" (UniqueName: \"kubernetes.io/projected/f9020904-a94e-479d-a497-75a587aed860-kube-api-access-6kvqf\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153710 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-config\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153751 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9020904-a94e-479d-a497-75a587aed860-config-out\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.153904 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9020904-a94e-479d-a497-75a587aed860-tls-assets\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.154009 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.154044 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-web-config\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260191 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f9020904-a94e-479d-a497-75a587aed860-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260354 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kvqf\" (UniqueName: \"kubernetes.io/projected/f9020904-a94e-479d-a497-75a587aed860-kube-api-access-6kvqf\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260406 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-config\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260446 4810 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9020904-a94e-479d-a497-75a587aed860-config-out\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260486 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9020904-a94e-479d-a497-75a587aed860-tls-assets\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260538 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260572 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-web-config\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260626 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260659 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9020904-a94e-479d-a497-75a587aed860-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.260712 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: E1009 00:31:39.260950 4810 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Oct 09 00:31:39 crc kubenswrapper[4810]: E1009 00:31:39.261030 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls podName:f9020904-a94e-479d-a497-75a587aed860 nodeName:}" failed. No retries permitted until 2025-10-09 00:31:39.761003939 +0000 UTC m=+1497.286642680 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "f9020904-a94e-479d-a497-75a587aed860") : secret "default-prometheus-proxy-tls" not found Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.263861 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f9020904-a94e-479d-a497-75a587aed860-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.264974 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f9020904-a94e-479d-a497-75a587aed860-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.278672 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-config\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.278722 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.279068 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9020904-a94e-479d-a497-75a587aed860-config-out\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.279187 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9020904-a94e-479d-a497-75a587aed860-tls-assets\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.279309 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-web-config\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.284454 4810 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.284523 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/4d1814f00608c413401a61eb9af5b77b5a25c2bafdd5f70006fd3621493bbdf6/globalmount\"" pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.301297 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kvqf\" (UniqueName: \"kubernetes.io/projected/f9020904-a94e-479d-a497-75a587aed860-kube-api-access-6kvqf\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.330438 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8b0ecd4b-460d-474b-ace0-8923288f0015\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: I1009 00:31:39.769385 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:39 crc kubenswrapper[4810]: E1009 00:31:39.769745 4810 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Oct 09 00:31:39 crc kubenswrapper[4810]: E1009 00:31:39.769912 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls podName:f9020904-a94e-479d-a497-75a587aed860 nodeName:}" failed. No retries permitted until 2025-10-09 00:31:40.769801018 +0000 UTC m=+1498.295439759 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "f9020904-a94e-479d-a497-75a587aed860") : secret "default-prometheus-proxy-tls" not found Oct 09 00:31:40 crc kubenswrapper[4810]: I1009 00:31:40.783701 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:40 crc kubenswrapper[4810]: I1009 00:31:40.789342 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9020904-a94e-479d-a497-75a587aed860-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"f9020904-a94e-479d-a497-75a587aed860\") " pod="service-telemetry/prometheus-default-0" Oct 09 00:31:40 crc kubenswrapper[4810]: I1009 00:31:40.807173 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Oct 09 00:31:41 crc kubenswrapper[4810]: I1009 00:31:41.236084 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Oct 09 00:31:41 crc kubenswrapper[4810]: I1009 00:31:41.470218 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"f9020904-a94e-479d-a497-75a587aed860","Type":"ContainerStarted","Data":"5997fa1eb7a85f7b3fae740094cf4ac415c321bb644adc1301e8fca9cb0453dd"} Oct 09 00:31:47 crc kubenswrapper[4810]: I1009 00:31:47.537203 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"f9020904-a94e-479d-a497-75a587aed860","Type":"ContainerStarted","Data":"0a639a0f08d63c7a61ca302be84f0abfcb18979a2075fc8bad21bd5555f21814"} Oct 09 00:31:47 crc kubenswrapper[4810]: I1009 00:31:47.949976 4810 scope.go:117] "RemoveContainer" containerID="8c8579a4a2dc043878bb1bdc2989d903d7c346ae4f9e26608ab2e6cf69d7e278" Oct 09 00:31:47 crc kubenswrapper[4810]: I1009 00:31:47.995715 4810 scope.go:117] "RemoveContainer" containerID="5e92d851c2f743424d6db919b32e638a21212f3739a999a71b74cc36b340af79" Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.447382 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-g867m"] Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.448252 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.476151 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-g867m"] Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.600088 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8gmz\" (UniqueName: \"kubernetes.io/projected/34160a9e-a9d8-45f2-be37-889dfbe283a5-kube-api-access-m8gmz\") pod \"default-snmp-webhook-6856cfb745-g867m\" (UID: \"34160a9e-a9d8-45f2-be37-889dfbe283a5\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.702263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8gmz\" (UniqueName: \"kubernetes.io/projected/34160a9e-a9d8-45f2-be37-889dfbe283a5-kube-api-access-m8gmz\") pod \"default-snmp-webhook-6856cfb745-g867m\" (UID: \"34160a9e-a9d8-45f2-be37-889dfbe283a5\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.734396 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8gmz\" (UniqueName: \"kubernetes.io/projected/34160a9e-a9d8-45f2-be37-889dfbe283a5-kube-api-access-m8gmz\") pod \"default-snmp-webhook-6856cfb745-g867m\" (UID: \"34160a9e-a9d8-45f2-be37-889dfbe283a5\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" Oct 09 00:31:48 crc kubenswrapper[4810]: I1009 00:31:48.820867 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" Oct 09 00:31:49 crc kubenswrapper[4810]: I1009 00:31:49.066424 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-g867m"] Oct 09 00:31:49 crc kubenswrapper[4810]: I1009 00:31:49.555549 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" event={"ID":"34160a9e-a9d8-45f2-be37-889dfbe283a5","Type":"ContainerStarted","Data":"2b5b399452fb6bba268d008c803f368cd57ac1784b875f6b8131e8984a96c656"} Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.184247 4810 patch_prober.go:28] interesting pod/machine-config-daemon-6752w container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.184507 4810 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.184551 4810 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-6752w" Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.185152 4810 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d"} 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.185225 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerName="machine-config-daemon" containerID="cri-o://50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" gracePeriod=600 Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.571486 4810 generic.go:334] "Generic (PLEG): container finished" podID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" exitCode=0 Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.571536 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerDied","Data":"50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d"} Oct 09 00:31:51 crc kubenswrapper[4810]: I1009 00:31:51.571570 4810 scope.go:117] "RemoveContainer" containerID="debdd6b5cbff428344b7777ba4f55fefe79d94121774d2baf5531c75de88d838" Oct 09 00:31:51 crc kubenswrapper[4810]: E1009 00:31:51.978075 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.110247 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.111600 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.115963 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-7gprf" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.116150 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.116348 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.116483 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.116644 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.118925 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.246886 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.246935 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2vks\" (UniqueName: \"kubernetes.io/projected/f9601525-d2da-4890-a464-42f2c4a7f0f0-kube-api-access-w2vks\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.246961 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-web-config\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.246991 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-config-volume\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.247006 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9601525-d2da-4890-a464-42f2c4a7f0f0-config-out\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.247032 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-342df374-1123-4138-b9d1-a2253e875f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-342df374-1123-4138-b9d1-a2253e875f75\") pod \"alertmanager-default-0\" (UID: 
\"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.247057 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9601525-d2da-4890-a464-42f2c4a7f0f0-tls-assets\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.247101 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358378 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358476 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358523 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2vks\" (UniqueName: \"kubernetes.io/projected/f9601525-d2da-4890-a464-42f2c4a7f0f0-kube-api-access-w2vks\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358579 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-web-config\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358653 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-config-volume\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358674 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9601525-d2da-4890-a464-42f2c4a7f0f0-config-out\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358741 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-342df374-1123-4138-b9d1-a2253e875f75\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-342df374-1123-4138-b9d1-a2253e875f75\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.358806 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9601525-d2da-4890-a464-42f2c4a7f0f0-tls-assets\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: E1009 00:31:52.359652 4810 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Oct 09 00:31:52 crc kubenswrapper[4810]: E1009 00:31:52.359946 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls podName:f9601525-d2da-4890-a464-42f2c4a7f0f0 nodeName:}" failed. No retries permitted until 2025-10-09 00:31:52.859929597 +0000 UTC m=+1510.385568298 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "f9601525-d2da-4890-a464-42f2c4a7f0f0") : secret "default-alertmanager-proxy-tls" not found Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.364903 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.365342 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-config-volume\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.365524 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9601525-d2da-4890-a464-42f2c4a7f0f0-tls-assets\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.367640 4810 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.367666 4810 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-342df374-1123-4138-b9d1-a2253e875f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-342df374-1123-4138-b9d1-a2253e875f75\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/79409e459f56433953f325529b79bee76dbd13bcc55c31b2a4418f722ca318e2/globalmount\"" pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.367770 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9601525-d2da-4890-a464-42f2c4a7f0f0-config-out\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.371385 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-web-config\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.383696 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2vks\" (UniqueName: \"kubernetes.io/projected/f9601525-d2da-4890-a464-42f2c4a7f0f0-kube-api-access-w2vks\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.386907 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-342df374-1123-4138-b9d1-a2253e875f75\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-342df374-1123-4138-b9d1-a2253e875f75\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.579960 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:31:52 crc kubenswrapper[4810]: E1009 00:31:52.580334 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:31:52 crc kubenswrapper[4810]: I1009 00:31:52.866204 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:52 crc kubenswrapper[4810]: E1009 00:31:52.866377 4810 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Oct 09 00:31:52 crc kubenswrapper[4810]: E1009 00:31:52.866595 4810 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls podName:f9601525-d2da-4890-a464-42f2c4a7f0f0 nodeName:}" failed. No retries permitted until 2025-10-09 00:31:53.866571585 +0000 UTC m=+1511.392210326 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "f9601525-d2da-4890-a464-42f2c4a7f0f0") : secret "default-alertmanager-proxy-tls" not found Oct 09 00:31:53 crc kubenswrapper[4810]: I1009 00:31:53.883210 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:53 crc kubenswrapper[4810]: E1009 00:31:53.883372 4810 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Oct 09 00:31:53 crc kubenswrapper[4810]: E1009 00:31:53.883650 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls podName:f9601525-d2da-4890-a464-42f2c4a7f0f0 nodeName:}" failed. No retries permitted until 2025-10-09 00:31:55.883630099 +0000 UTC m=+1513.409268800 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "f9601525-d2da-4890-a464-42f2c4a7f0f0") : secret "default-alertmanager-proxy-tls" not found Oct 09 00:31:54 crc kubenswrapper[4810]: I1009 00:31:54.599890 4810 generic.go:334] "Generic (PLEG): container finished" podID="f9020904-a94e-479d-a497-75a587aed860" containerID="0a639a0f08d63c7a61ca302be84f0abfcb18979a2075fc8bad21bd5555f21814" exitCode=0 Oct 09 00:31:54 crc kubenswrapper[4810]: I1009 00:31:54.599929 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"f9020904-a94e-479d-a497-75a587aed860","Type":"ContainerDied","Data":"0a639a0f08d63c7a61ca302be84f0abfcb18979a2075fc8bad21bd5555f21814"} Oct 09 00:31:55 crc kubenswrapper[4810]: I1009 00:31:55.612818 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" event={"ID":"34160a9e-a9d8-45f2-be37-889dfbe283a5","Type":"ContainerStarted","Data":"49c7b9c026fc90fc3ecb1b362469026ea02e2d17b198b285ed21faed85d05eda"} Oct 09 00:31:55 crc kubenswrapper[4810]: I1009 00:31:55.654776 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-6856cfb745-g867m" podStartSLOduration=1.75355203 podStartE2EDuration="7.654747001s" podCreationTimestamp="2025-10-09 00:31:48 +0000 UTC" firstStartedPulling="2025-10-09 00:31:49.068770629 +0000 UTC m=+1506.594409350" lastFinishedPulling="2025-10-09 00:31:54.96996562 +0000 UTC m=+1512.495604321" observedRunningTime="2025-10-09 00:31:55.638090711 +0000 UTC m=+1513.163729452" watchObservedRunningTime="2025-10-09 00:31:55.654747001 +0000 UTC m=+1513.180385732" Oct 09 00:31:55 crc kubenswrapper[4810]: 
I1009 00:31:55.911713 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:55 crc kubenswrapper[4810]: I1009 00:31:55.918641 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/f9601525-d2da-4890-a464-42f2c4a7f0f0-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"f9601525-d2da-4890-a464-42f2c4a7f0f0\") " pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:56 crc kubenswrapper[4810]: I1009 00:31:56.042015 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Oct 09 00:31:56 crc kubenswrapper[4810]: I1009 00:31:56.478920 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Oct 09 00:31:56 crc kubenswrapper[4810]: W1009 00:31:56.487793 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9601525_d2da_4890_a464_42f2c4a7f0f0.slice/crio-a61d1fe52c88cc77bc04fa9e5d9630aa0c06378aa8a7ee927fb79217c187adac WatchSource:0}: Error finding container a61d1fe52c88cc77bc04fa9e5d9630aa0c06378aa8a7ee927fb79217c187adac: Status 404 returned error can't find the container with id a61d1fe52c88cc77bc04fa9e5d9630aa0c06378aa8a7ee927fb79217c187adac Oct 09 00:31:56 crc kubenswrapper[4810]: I1009 00:31:56.622247 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"f9601525-d2da-4890-a464-42f2c4a7f0f0","Type":"ContainerStarted","Data":"a61d1fe52c88cc77bc04fa9e5d9630aa0c06378aa8a7ee927fb79217c187adac"} Oct 09 00:31:58 crc kubenswrapper[4810]: I1009 00:31:58.641063 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"f9601525-d2da-4890-a464-42f2c4a7f0f0","Type":"ContainerStarted","Data":"3ae7907d6425c6cc70bfed8a969456a826b55f5da6fd2c15e6553ec7e3934f38"} Oct 09 00:31:59 crc kubenswrapper[4810]: I1009 00:31:59.653097 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"f9020904-a94e-479d-a497-75a587aed860","Type":"ContainerStarted","Data":"e7f720cd843926f5ee9c89c0f643df7f1448f7fd8c86b91a1afa6b4a5c7e9b05"} Oct 09 00:32:02 crc kubenswrapper[4810]: I1009 00:32:02.675012 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"f9020904-a94e-479d-a497-75a587aed860","Type":"ContainerStarted","Data":"bb46ea773e38b7762ecc41ac4613185a30583b7f35bfca09b5215b2b23b7dabb"} Oct 09 00:32:03 crc kubenswrapper[4810]: I1009 00:32:03.258692 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:32:03 crc kubenswrapper[4810]: E1009 00:32:03.259055 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.838529 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2"] Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.840283 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.843797 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.843847 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.843862 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-xxflr" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.843792 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.856983 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2"] Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.933479 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.933527 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/94243b4a-0039-4087-9a1e-14199e488715-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.933552 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/94243b4a-0039-4087-9a1e-14199e488715-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.933570 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:04 crc kubenswrapper[4810]: I1009 00:32:04.933589 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-ccdgl\" (UniqueName: \"kubernetes.io/projected/94243b4a-0039-4087-9a1e-14199e488715-kube-api-access-ccdgl\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.034997 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/94243b4a-0039-4087-9a1e-14199e488715-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.035053 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.035082 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccdgl\" (UniqueName: \"kubernetes.io/projected/94243b4a-0039-4087-9a1e-14199e488715-kube-api-access-ccdgl\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.035190 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.035225 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/94243b4a-0039-4087-9a1e-14199e488715-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.035766 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/94243b4a-0039-4087-9a1e-14199e488715-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.036555 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/94243b4a-0039-4087-9a1e-14199e488715-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: E1009 
00:32:05.036645 4810 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Oct 09 00:32:05 crc kubenswrapper[4810]: E1009 00:32:05.036696 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls podName:94243b4a-0039-4087-9a1e-14199e488715 nodeName:}" failed. No retries permitted until 2025-10-09 00:32:05.536678102 +0000 UTC m=+1523.062316803 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" (UID: "94243b4a-0039-4087-9a1e-14199e488715") : secret "default-cloud1-coll-meter-proxy-tls" not found Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.048943 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.063557 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccdgl\" (UniqueName: \"kubernetes.io/projected/94243b4a-0039-4087-9a1e-14199e488715-kube-api-access-ccdgl\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.541775 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:05 crc kubenswrapper[4810]: E1009 00:32:05.542175 4810 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Oct 09 00:32:05 crc kubenswrapper[4810]: E1009 00:32:05.542631 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls podName:94243b4a-0039-4087-9a1e-14199e488715 nodeName:}" failed. No retries permitted until 2025-10-09 00:32:06.54260622 +0000 UTC m=+1524.068244931 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" (UID: "94243b4a-0039-4087-9a1e-14199e488715") : secret "default-cloud1-coll-meter-proxy-tls" not found Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.699063 4810 generic.go:334] "Generic (PLEG): container finished" podID="f9601525-d2da-4890-a464-42f2c4a7f0f0" containerID="3ae7907d6425c6cc70bfed8a969456a826b55f5da6fd2c15e6553ec7e3934f38" exitCode=0 Oct 09 00:32:05 crc kubenswrapper[4810]: I1009 00:32:05.699104 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"f9601525-d2da-4890-a464-42f2c4a7f0f0","Type":"ContainerDied","Data":"3ae7907d6425c6cc70bfed8a969456a826b55f5da6fd2c15e6553ec7e3934f38"} Oct 09 00:32:06 crc kubenswrapper[4810]: I1009 00:32:06.557359 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:06 crc kubenswrapper[4810]: I1009 00:32:06.563383 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/94243b4a-0039-4087-9a1e-14199e488715-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2\" (UID: \"94243b4a-0039-4087-9a1e-14199e488715\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:06 crc kubenswrapper[4810]: I1009 00:32:06.658955 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.070585 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g"] Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.071679 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.073962 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.075799 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.093185 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g"] Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.165670 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/1dce5a91-a66f-4936-9a04-05cd00df18e5-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.165724 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.165753 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/1dce5a91-a66f-4936-9a04-05cd00df18e5-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.165852 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.165892 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv2cb\" (UniqueName: \"kubernetes.io/projected/1dce5a91-a66f-4936-9a04-05cd00df18e5-kube-api-access-tv2cb\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.267053 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/1dce5a91-a66f-4936-9a04-05cd00df18e5-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc 
kubenswrapper[4810]: I1009 00:32:07.267106 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.267130 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/1dce5a91-a66f-4936-9a04-05cd00df18e5-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.267172 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.267207 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv2cb\" (UniqueName: \"kubernetes.io/projected/1dce5a91-a66f-4936-9a04-05cd00df18e5-kube-api-access-tv2cb\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.267965 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/1dce5a91-a66f-4936-9a04-05cd00df18e5-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: E1009 00:32:07.268669 4810 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 09 00:32:07 crc kubenswrapper[4810]: E1009 00:32:07.268723 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls podName:1dce5a91-a66f-4936-9a04-05cd00df18e5 nodeName:}" failed. No retries permitted until 2025-10-09 00:32:07.768706534 +0000 UTC m=+1525.294345225 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" (UID: "1dce5a91-a66f-4936-9a04-05cd00df18e5") : secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.269564 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/1dce5a91-a66f-4936-9a04-05cd00df18e5-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.271345 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.286310 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv2cb\" (UniqueName: \"kubernetes.io/projected/1dce5a91-a66f-4936-9a04-05cd00df18e5-kube-api-access-tv2cb\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: I1009 00:32:07.773613 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:07 crc kubenswrapper[4810]: E1009 00:32:07.773863 4810 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 09 00:32:07 crc kubenswrapper[4810]: E1009 00:32:07.773915 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls podName:1dce5a91-a66f-4936-9a04-05cd00df18e5 nodeName:}" failed. No retries permitted until 2025-10-09 00:32:08.773901761 +0000 UTC m=+1526.299540462 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" (UID: "1dce5a91-a66f-4936-9a04-05cd00df18e5") : secret "default-cloud1-ceil-meter-proxy-tls" not found Oct 09 00:32:08 crc kubenswrapper[4810]: I1009 00:32:08.787677 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:08 crc kubenswrapper[4810]: I1009 00:32:08.791952 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/1dce5a91-a66f-4936-9a04-05cd00df18e5-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g\" (UID: \"1dce5a91-a66f-4936-9a04-05cd00df18e5\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:08 crc kubenswrapper[4810]: I1009 00:32:08.914145 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" Oct 09 00:32:10 crc kubenswrapper[4810]: I1009 00:32:10.630856 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g"] Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.342652 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw"] Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.344679 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.346849 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.347057 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.351605 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw"] Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.428630 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.428687 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2tjd\" (UniqueName: \"kubernetes.io/projected/d7a9475e-1b07-437e-9837-67c2a0b43887-kube-api-access-s2tjd\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.428724 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.428743 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/d7a9475e-1b07-437e-9837-67c2a0b43887-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.428793 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/d7a9475e-1b07-437e-9837-67c2a0b43887-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.490582 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2"] Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.529692 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/d7a9475e-1b07-437e-9837-67c2a0b43887-socket-dir\") pod 
\"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.529994 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.530091 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2tjd\" (UniqueName: \"kubernetes.io/projected/d7a9475e-1b07-437e-9837-67c2a0b43887-kube-api-access-s2tjd\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.530187 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.530263 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/d7a9475e-1b07-437e-9837-67c2a0b43887-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: E1009 00:32:11.530266 4810 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Oct 09 00:32:11 crc kubenswrapper[4810]: E1009 00:32:11.530515 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls podName:d7a9475e-1b07-437e-9837-67c2a0b43887 nodeName:}" failed. No retries permitted until 2025-10-09 00:32:12.030497949 +0000 UTC m=+1529.556136650 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" (UID: "d7a9475e-1b07-437e-9837-67c2a0b43887") : secret "default-cloud1-sens-meter-proxy-tls" not found Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.530198 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/d7a9475e-1b07-437e-9837-67c2a0b43887-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.530876 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/d7a9475e-1b07-437e-9837-67c2a0b43887-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.537494 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.549623 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2tjd\" (UniqueName: \"kubernetes.io/projected/d7a9475e-1b07-437e-9837-67c2a0b43887-kube-api-access-s2tjd\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.738273 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"f9020904-a94e-479d-a497-75a587aed860","Type":"ContainerStarted","Data":"11cf93c68ca7b74af2b188628150a4f317a2df216c753a27b3914e7b058099bf"} Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.739427 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerStarted","Data":"6665ac41598f4e4821484427afae3186c501627a21f5ae9dcce685f80e95ef52"} Oct 09 00:32:11 crc kubenswrapper[4810]: I1009 00:32:11.760966 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.890176972 podStartE2EDuration="34.760947429s" podCreationTimestamp="2025-10-09 00:31:37 +0000 UTC" firstStartedPulling="2025-10-09 00:31:41.246423114 +0000 UTC m=+1498.772061835" lastFinishedPulling="2025-10-09 00:32:11.117193591 +0000 UTC m=+1528.642832292" observedRunningTime="2025-10-09 00:32:11.758402745 +0000 UTC m=+1529.284041446" watchObservedRunningTime="2025-10-09 00:32:11.760947429 +0000 UTC m=+1529.286586130" Oct 09 00:32:12 crc kubenswrapper[4810]: I1009 00:32:12.036640 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:12 crc kubenswrapper[4810]: E1009 00:32:12.036805 4810 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Oct 09 00:32:12 crc kubenswrapper[4810]: E1009 00:32:12.037099 4810 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls podName:d7a9475e-1b07-437e-9837-67c2a0b43887 nodeName:}" failed. No retries permitted until 2025-10-09 00:32:13.037076515 +0000 UTC m=+1530.562715216 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" (UID: "d7a9475e-1b07-437e-9837-67c2a0b43887") : secret "default-cloud1-sens-meter-proxy-tls" not found Oct 09 00:32:12 crc kubenswrapper[4810]: W1009 00:32:12.190785 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94243b4a_0039_4087_9a1e_14199e488715.slice/crio-ece9af87c441623a0d1f7603651837d4438dbb2d836ade2a647956b02e0753c3 WatchSource:0}: Error finding container ece9af87c441623a0d1f7603651837d4438dbb2d836ade2a647956b02e0753c3: Status 404 returned error can't find the container with id ece9af87c441623a0d1f7603651837d4438dbb2d836ade2a647956b02e0753c3 Oct 09 00:32:12 crc kubenswrapper[4810]: I1009 00:32:12.748518 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"f9601525-d2da-4890-a464-42f2c4a7f0f0","Type":"ContainerStarted","Data":"0e20a4fc19e5411d5875897b8e8dcc72ec9029512db912d29ce403d849302b49"} Oct 09 00:32:12 crc kubenswrapper[4810]: I1009 00:32:12.752867 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerStarted","Data":"ece9af87c441623a0d1f7603651837d4438dbb2d836ade2a647956b02e0753c3"} Oct 09 00:32:12 crc kubenswrapper[4810]: I1009 00:32:12.761144 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerStarted","Data":"e8b7eea3e84b1c5973a996102da903fb0a11f576c5aa32602d3b30e3e0d65855"} Oct 09 00:32:13 crc kubenswrapper[4810]: I1009 00:32:13.049569 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:13 crc kubenswrapper[4810]: I1009 00:32:13.053453 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/d7a9475e-1b07-437e-9837-67c2a0b43887-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-775pw\" (UID: \"d7a9475e-1b07-437e-9837-67c2a0b43887\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:13 crc kubenswrapper[4810]: I1009 00:32:13.175499 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" Oct 09 00:32:13 crc kubenswrapper[4810]: I1009 00:32:13.451295 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw"] Oct 09 00:32:13 crc kubenswrapper[4810]: W1009 00:32:13.465512 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7a9475e_1b07_437e_9837_67c2a0b43887.slice/crio-7fc581a4cd2b233961fadc02fb64b4fd30ae0bcadff896221853a40efdb7ef60 WatchSource:0}: Error finding container 7fc581a4cd2b233961fadc02fb64b4fd30ae0bcadff896221853a40efdb7ef60: Status 404 returned error can't find the container with id 7fc581a4cd2b233961fadc02fb64b4fd30ae0bcadff896221853a40efdb7ef60 Oct 09 00:32:13 crc kubenswrapper[4810]: I1009 00:32:13.768765 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerStarted","Data":"7fc581a4cd2b233961fadc02fb64b4fd30ae0bcadff896221853a40efdb7ef60"} Oct 09 00:32:13 crc kubenswrapper[4810]: I1009 00:32:13.771177 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerStarted","Data":"21ca90cdd947c5629082ca48274879122cb005ca0327d485d8f39e973b29c07f"} Oct 09 00:32:14 crc kubenswrapper[4810]: I1009 00:32:14.781512 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"f9601525-d2da-4890-a464-42f2c4a7f0f0","Type":"ContainerStarted","Data":"f6354d7de5606044182b32b0545285bc2524a4e855b1587c93b50e2d9c6f0029"} Oct 09 00:32:14 crc kubenswrapper[4810]: I1009 00:32:14.783206 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerStarted","Data":"858f660079eec639517b006de0da7cb36131509c489ad60c1fa4d502f4f1fdfb"} Oct 09 00:32:15 crc kubenswrapper[4810]: I1009 00:32:15.792302 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"f9601525-d2da-4890-a464-42f2c4a7f0f0","Type":"ContainerStarted","Data":"e2610bc819bcd48d504fbf99a1f33d17bd238c45f3f477cb70aeadec578c7a99"} Oct 09 00:32:15 crc kubenswrapper[4810]: I1009 00:32:15.808754 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.154243 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=17.668073183 podStartE2EDuration="27.154223558s" podCreationTimestamp="2025-10-09 00:31:51 +0000 UTC" firstStartedPulling="2025-10-09 00:32:05.701684463 +0000 UTC m=+1523.227323154" lastFinishedPulling="2025-10-09 00:32:15.187834828 +0000 UTC 
m=+1532.713473529" observedRunningTime="2025-10-09 00:32:15.825164331 +0000 UTC m=+1533.350803022" watchObservedRunningTime="2025-10-09 00:32:18.154223558 +0000 UTC m=+1535.679862279" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.155231 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs"] Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.159174 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.162247 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.162766 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.168124 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs"] Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.216661 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ff87281-48cc-432c-8bb8-fe29bf27f7da-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.217034 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/5ff87281-48cc-432c-8bb8-fe29bf27f7da-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.217194 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ff87281-48cc-432c-8bb8-fe29bf27f7da-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.217332 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7r5z\" (UniqueName: \"kubernetes.io/projected/5ff87281-48cc-432c-8bb8-fe29bf27f7da-kube-api-access-n7r5z\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.254739 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:32:18 crc kubenswrapper[4810]: E1009 00:32:18.254943 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.318519 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ff87281-48cc-432c-8bb8-fe29bf27f7da-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.318592 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/5ff87281-48cc-432c-8bb8-fe29bf27f7da-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.318660 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ff87281-48cc-432c-8bb8-fe29bf27f7da-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.318693 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7r5z\" (UniqueName: \"kubernetes.io/projected/5ff87281-48cc-432c-8bb8-fe29bf27f7da-kube-api-access-n7r5z\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.320101 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5ff87281-48cc-432c-8bb8-fe29bf27f7da-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.322296 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5ff87281-48cc-432c-8bb8-fe29bf27f7da-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.325483 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/5ff87281-48cc-432c-8bb8-fe29bf27f7da-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.336879 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7r5z\" (UniqueName: 
\"kubernetes.io/projected/5ff87281-48cc-432c-8bb8-fe29bf27f7da-kube-api-access-n7r5z\") pod \"default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs\" (UID: \"5ff87281-48cc-432c-8bb8-fe29bf27f7da\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:18 crc kubenswrapper[4810]: I1009 00:32:18.488213 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.093627 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9"] Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.095145 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.096922 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.104122 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9"] Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.145171 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs"] Oct 09 00:32:19 crc kubenswrapper[4810]: W1009 00:32:19.175338 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ff87281_48cc_432c_8bb8_fe29bf27f7da.slice/crio-fc0cefacc1dcf2da217ac2d9a91bfb41a7d06a3115fe1db98b47a1278fcff49a WatchSource:0}: Error finding container fc0cefacc1dcf2da217ac2d9a91bfb41a7d06a3115fe1db98b47a1278fcff49a: Status 404 returned error can't find the container with id fc0cefacc1dcf2da217ac2d9a91bfb41a7d06a3115fe1db98b47a1278fcff49a Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.232428 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/417484c8-987f-4ac7-b8ca-7de8c2dfc404-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.232479 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/417484c8-987f-4ac7-b8ca-7de8c2dfc404-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.232499 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/417484c8-987f-4ac7-b8ca-7de8c2dfc404-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.232612 4810 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb27q\" (UniqueName: \"kubernetes.io/projected/417484c8-987f-4ac7-b8ca-7de8c2dfc404-kube-api-access-kb27q\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.333700 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb27q\" (UniqueName: \"kubernetes.io/projected/417484c8-987f-4ac7-b8ca-7de8c2dfc404-kube-api-access-kb27q\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.334472 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/417484c8-987f-4ac7-b8ca-7de8c2dfc404-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.334548 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/417484c8-987f-4ac7-b8ca-7de8c2dfc404-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.334582 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/417484c8-987f-4ac7-b8ca-7de8c2dfc404-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.336258 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/417484c8-987f-4ac7-b8ca-7de8c2dfc404-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.336939 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/417484c8-987f-4ac7-b8ca-7de8c2dfc404-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.342385 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/417484c8-987f-4ac7-b8ca-7de8c2dfc404-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: 
I1009 00:32:19.348995 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb27q\" (UniqueName: \"kubernetes.io/projected/417484c8-987f-4ac7-b8ca-7de8c2dfc404-kube-api-access-kb27q\") pod \"default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9\" (UID: \"417484c8-987f-4ac7-b8ca-7de8c2dfc404\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.428371 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.819857 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerStarted","Data":"6d92699b79ea21ef8e9282dbe3bea5d94e1d24d71e208d58ea098dff30726373"} Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.822437 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerStarted","Data":"6e1a13cd2183514a2835eda6fc9aa1b0ce7e159d71861f106c556f29d93f0349"} Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.825866 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerStarted","Data":"db2e12849cfec6dfbb3317bbcbe3c92801cdc0495f560ec1e7b831aa677f597a"} Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.827605 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerStarted","Data":"a5b7db755d1612899f7a556ce93d6dcec06e566827648d323c6808010588ee4b"} Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.827626 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerStarted","Data":"fc0cefacc1dcf2da217ac2d9a91bfb41a7d06a3115fe1db98b47a1278fcff49a"} Oct 09 00:32:19 crc kubenswrapper[4810]: I1009 00:32:19.837935 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9"] Oct 09 00:32:19 crc kubenswrapper[4810]: W1009 00:32:19.841275 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod417484c8_987f_4ac7_b8ca_7de8c2dfc404.slice/crio-a4c7de8a27d61b6daa8bb4a2539719d823801470955957de0dde03f7cf34e7d9 WatchSource:0}: Error finding container a4c7de8a27d61b6daa8bb4a2539719d823801470955957de0dde03f7cf34e7d9: Status 404 returned error can't find the container with id a4c7de8a27d61b6daa8bb4a2539719d823801470955957de0dde03f7cf34e7d9 Oct 09 00:32:20 crc kubenswrapper[4810]: I1009 00:32:20.840726 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerStarted","Data":"a9e707569022d6f724ef186602a88b57a9bbba584c3fb8b60aa77e526ba25eaf"} Oct 09 00:32:20 crc kubenswrapper[4810]: I1009 00:32:20.841077 4810 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerStarted","Data":"a4c7de8a27d61b6daa8bb4a2539719d823801470955957de0dde03f7cf34e7d9"} Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.876098 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerStarted","Data":"3be065864c418a6e0f0ed1aeac7d75a00c87a44ddf7c342cad4b162763069195"} Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.879901 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerStarted","Data":"5296f748d2bdb41cddb51831d58b96ddfffb530eac89b525aa542e8f9f5e6dc6"} Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.882671 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerStarted","Data":"7ec6b67bd7556854873da1ecbbd73de700d46e309f2b5cabd9e75e171cdfd2ed"} Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.884990 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerStarted","Data":"fdca1b41e644c5e7ce02cb4731a1b3bd30816bdacdc28a8b0fcc605ebfbb68ce"} Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.887269 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerStarted","Data":"abf1c5cee264bfc9ebbb48d61fcc9c36dd034dd370bf32298771783505bb464f"} Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.900099 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" podStartSLOduration=9.266037595 podStartE2EDuration="20.900076847s" podCreationTimestamp="2025-10-09 00:32:04 +0000 UTC" firstStartedPulling="2025-10-09 00:32:12.432016684 +0000 UTC m=+1529.957655385" lastFinishedPulling="2025-10-09 00:32:24.066055926 +0000 UTC m=+1541.591694637" observedRunningTime="2025-10-09 00:32:24.895097133 +0000 UTC m=+1542.420735874" watchObservedRunningTime="2025-10-09 00:32:24.900076847 +0000 UTC m=+1542.425715578" Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.918020 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" podStartSLOduration=4.937641281 podStartE2EDuration="17.917999603s" podCreationTimestamp="2025-10-09 00:32:07 +0000 UTC" firstStartedPulling="2025-10-09 00:32:11.08661994 +0000 UTC m=+1528.612258641" lastFinishedPulling="2025-10-09 00:32:24.066978252 +0000 UTC m=+1541.592616963" observedRunningTime="2025-10-09 00:32:24.916176021 +0000 UTC m=+1542.441814722" watchObservedRunningTime="2025-10-09 00:32:24.917999603 +0000 UTC m=+1542.443638314" Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.945062 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" podStartSLOduration=3.279649581 podStartE2EDuration="13.945040702s" podCreationTimestamp="2025-10-09 00:32:11 +0000 UTC" firstStartedPulling="2025-10-09 00:32:13.468736336 +0000 UTC m=+1530.994375037" lastFinishedPulling="2025-10-09 00:32:24.134127457 +0000 UTC m=+1541.659766158" observedRunningTime="2025-10-09 00:32:24.939982116 +0000 UTC m=+1542.465620827" watchObservedRunningTime="2025-10-09 00:32:24.945040702 +0000 UTC m=+1542.470679403" Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.971064 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" podStartSLOduration=1.708585926 podStartE2EDuration="5.971043671s" podCreationTimestamp="2025-10-09 00:32:19 +0000 UTC" firstStartedPulling="2025-10-09 00:32:19.853676134 +0000 UTC m=+1537.379314835" lastFinishedPulling="2025-10-09 00:32:24.116133879 +0000 UTC m=+1541.641772580" observedRunningTime="2025-10-09 00:32:24.967021756 +0000 UTC m=+1542.492660467" watchObservedRunningTime="2025-10-09 00:32:24.971043671 +0000 UTC m=+1542.496682402" Oct 09 00:32:24 crc kubenswrapper[4810]: I1009 00:32:24.989537 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" podStartSLOduration=2.049052224 podStartE2EDuration="6.989517284s" podCreationTimestamp="2025-10-09 00:32:18 +0000 UTC" firstStartedPulling="2025-10-09 00:32:19.180538659 +0000 UTC m=+1536.706177360" lastFinishedPulling="2025-10-09 00:32:24.121003699 +0000 UTC m=+1541.646642420" observedRunningTime="2025-10-09 00:32:24.984312784 +0000 UTC m=+1542.509951485" watchObservedRunningTime="2025-10-09 00:32:24.989517284 +0000 UTC m=+1542.515155995" Oct 09 00:32:25 crc kubenswrapper[4810]: I1009 00:32:25.808577 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Oct 09 00:32:25 crc kubenswrapper[4810]: I1009 00:32:25.847990 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Oct 09 00:32:25 crc kubenswrapper[4810]: I1009 00:32:25.937640 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Oct 09 00:32:29 crc kubenswrapper[4810]: I1009 00:32:29.255939 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:32:29 crc kubenswrapper[4810]: E1009 00:32:29.256660 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:32:30 crc kubenswrapper[4810]: I1009 00:32:30.700409 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-cn5ld"] Oct 09 00:32:30 crc kubenswrapper[4810]: I1009 00:32:30.701561 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" podUID="a53e61eb-d1f3-4467-abe6-a25041614849" containerName="default-interconnect" 
containerID="cri-o://840a76de9ec4b37b0cb4d9f925825f096126b204d3279b09f60291d063716738" gracePeriod=30 Oct 09 00:32:30 crc kubenswrapper[4810]: I1009 00:32:30.931979 4810 generic.go:334] "Generic (PLEG): container finished" podID="a53e61eb-d1f3-4467-abe6-a25041614849" containerID="840a76de9ec4b37b0cb4d9f925825f096126b204d3279b09f60291d063716738" exitCode=0 Oct 09 00:32:30 crc kubenswrapper[4810]: I1009 00:32:30.932020 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" event={"ID":"a53e61eb-d1f3-4467-abe6-a25041614849","Type":"ContainerDied","Data":"840a76de9ec4b37b0cb4d9f925825f096126b204d3279b09f60291d063716738"} Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.939931 4810 generic.go:334] "Generic (PLEG): container finished" podID="d7a9475e-1b07-437e-9837-67c2a0b43887" containerID="db2e12849cfec6dfbb3317bbcbe3c92801cdc0495f560ec1e7b831aa677f597a" exitCode=0 Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.939996 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerDied","Data":"db2e12849cfec6dfbb3317bbcbe3c92801cdc0495f560ec1e7b831aa677f597a"} Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.940442 4810 scope.go:117] "RemoveContainer" containerID="db2e12849cfec6dfbb3317bbcbe3c92801cdc0495f560ec1e7b831aa677f597a" Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.942949 4810 generic.go:334] "Generic (PLEG): container finished" podID="417484c8-987f-4ac7-b8ca-7de8c2dfc404" containerID="a9e707569022d6f724ef186602a88b57a9bbba584c3fb8b60aa77e526ba25eaf" exitCode=0 Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.942995 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerDied","Data":"a9e707569022d6f724ef186602a88b57a9bbba584c3fb8b60aa77e526ba25eaf"} Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.943264 4810 scope.go:117] "RemoveContainer" containerID="a9e707569022d6f724ef186602a88b57a9bbba584c3fb8b60aa77e526ba25eaf" Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.946014 4810 generic.go:334] "Generic (PLEG): container finished" podID="5ff87281-48cc-432c-8bb8-fe29bf27f7da" containerID="a5b7db755d1612899f7a556ce93d6dcec06e566827648d323c6808010588ee4b" exitCode=0 Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.946073 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerDied","Data":"a5b7db755d1612899f7a556ce93d6dcec06e566827648d323c6808010588ee4b"} Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.946395 4810 scope.go:117] "RemoveContainer" containerID="a5b7db755d1612899f7a556ce93d6dcec06e566827648d323c6808010588ee4b" Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.948956 4810 generic.go:334] "Generic (PLEG): container finished" podID="94243b4a-0039-4087-9a1e-14199e488715" containerID="6d92699b79ea21ef8e9282dbe3bea5d94e1d24d71e208d58ea098dff30726373" exitCode=0 Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.949003 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" 
event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerDied","Data":"6d92699b79ea21ef8e9282dbe3bea5d94e1d24d71e208d58ea098dff30726373"} Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.949754 4810 scope.go:117] "RemoveContainer" containerID="6d92699b79ea21ef8e9282dbe3bea5d94e1d24d71e208d58ea098dff30726373" Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.952924 4810 generic.go:334] "Generic (PLEG): container finished" podID="1dce5a91-a66f-4936-9a04-05cd00df18e5" containerID="6e1a13cd2183514a2835eda6fc9aa1b0ce7e159d71861f106c556f29d93f0349" exitCode=0 Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.952961 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerDied","Data":"6e1a13cd2183514a2835eda6fc9aa1b0ce7e159d71861f106c556f29d93f0349"} Oct 09 00:32:31 crc kubenswrapper[4810]: I1009 00:32:31.953367 4810 scope.go:117] "RemoveContainer" containerID="6e1a13cd2183514a2835eda6fc9aa1b0ce7e159d71861f106c556f29d93f0349" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.488148 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527295 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-credentials\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527373 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg2bx\" (UniqueName: \"kubernetes.io/projected/a53e61eb-d1f3-4467-abe6-a25041614849-kube-api-access-rg2bx\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527420 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-config\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527456 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-users\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527529 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-ca\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527577 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-ca\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: 
\"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.527619 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-credentials\") pod \"a53e61eb-d1f3-4467-abe6-a25041614849\" (UID: \"a53e61eb-d1f3-4467-abe6-a25041614849\") " Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.528611 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.545996 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.548406 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a53e61eb-d1f3-4467-abe6-a25041614849-kube-api-access-rg2bx" (OuterVolumeSpecName: "kube-api-access-rg2bx") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "kube-api-access-rg2bx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.548614 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.548748 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "default-interconnect-openstack-credentials". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.549920 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6l6gg"] Oct 09 00:32:33 crc kubenswrapper[4810]: E1009 00:32:33.550269 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a53e61eb-d1f3-4467-abe6-a25041614849" containerName="default-interconnect" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.550288 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="a53e61eb-d1f3-4467-abe6-a25041614849" containerName="default-interconnect" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.550459 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="a53e61eb-d1f3-4467-abe6-a25041614849" containerName="default-interconnect" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.551048 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.551697 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.553211 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "a53e61eb-d1f3-4467-abe6-a25041614849" (UID: "a53e61eb-d1f3-4467-abe6-a25041614849"). InnerVolumeSpecName "default-interconnect-openstack-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.564467 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6l6gg"] Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630646 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630709 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/5980be55-e943-49e1-b2c3-a08981372c6b-sasl-config\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630763 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630789 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2878\" (UniqueName: \"kubernetes.io/projected/5980be55-e943-49e1-b2c3-a08981372c6b-kube-api-access-t2878\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630845 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-sasl-users\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630906 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.630961 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631037 4810 reconciler_common.go:293] 
"Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631056 4810 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631069 4810 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631085 4810 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631099 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg2bx\" (UniqueName: \"kubernetes.io/projected/a53e61eb-d1f3-4467-abe6-a25041614849-kube-api-access-rg2bx\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631111 4810 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.631123 4810 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a53e61eb-d1f3-4467-abe6-a25041614849-sasl-users\") on node \"crc\" DevicePath \"\"" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734444 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734525 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734551 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/5980be55-e943-49e1-b2c3-a08981372c6b-sasl-config\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734578 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: 
\"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734599 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2878\" (UniqueName: \"kubernetes.io/projected/5980be55-e943-49e1-b2c3-a08981372c6b-kube-api-access-t2878\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734626 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-sasl-users\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.734666 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.737601 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/5980be55-e943-49e1-b2c3-a08981372c6b-sasl-config\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.739714 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.739857 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.740578 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.741563 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: 
\"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-sasl-users\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.742046 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/5980be55-e943-49e1-b2c3-a08981372c6b-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.752007 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2878\" (UniqueName: \"kubernetes.io/projected/5980be55-e943-49e1-b2c3-a08981372c6b-kube-api-access-t2878\") pod \"default-interconnect-68864d46cb-6l6gg\" (UID: \"5980be55-e943-49e1-b2c3-a08981372c6b\") " pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:33 crc kubenswrapper[4810]: I1009 00:32:33.899020 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.004221 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerStarted","Data":"19e1f47d4284cffc71e59643f4362181c6044b13582931f3fb89f1970524a557"} Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.013274 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerStarted","Data":"4e7d187b1cf44385f79cd4551e938ffcc4b2276b39a9b3d7f991bdd7dd262cb9"} Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.015105 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" event={"ID":"a53e61eb-d1f3-4467-abe6-a25041614849","Type":"ContainerDied","Data":"8b28ccfc14478bc3f7e4457fd2cdadce9db703df42a86d130fa648ce06cf56a6"} Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.015134 4810 scope.go:117] "RemoveContainer" containerID="840a76de9ec4b37b0cb4d9f925825f096126b204d3279b09f60291d063716738" Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.015247 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-cn5ld" Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.034329 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerStarted","Data":"f3e9dcfa47df8dd4f3e6cffbbef9a154158861d147912132907a1b15d7419638"} Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.060720 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerStarted","Data":"fcd06119d8010a23b22f890221b337358ff28f81c20a26d46975127cd2103c36"} Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.063580 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerStarted","Data":"06c379c10a67101422d57aab9c77e1048bb55de24f7d3fc8f797e29a00d4cafb"} Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.118884 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-cn5ld"] Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.133238 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-cn5ld"] Oct 09 00:32:34 crc kubenswrapper[4810]: I1009 00:32:34.505873 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-6l6gg"] Oct 09 00:32:34 crc kubenswrapper[4810]: W1009 00:32:34.512084 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5980be55_e943_49e1_b2c3_a08981372c6b.slice/crio-6fe2394c773cf8f3c53e2540f2b5e19146b1ef3a5ac51940982dc07809dabea0 WatchSource:0}: Error finding container 6fe2394c773cf8f3c53e2540f2b5e19146b1ef3a5ac51940982dc07809dabea0: Status 404 returned error can't find the container with id 6fe2394c773cf8f3c53e2540f2b5e19146b1ef3a5ac51940982dc07809dabea0 Oct 09 00:32:34 crc kubenswrapper[4810]: E1009 00:32:34.686649 4810 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1dce5a91_a66f_4936_9a04_05cd00df18e5.slice/crio-fcd06119d8010a23b22f890221b337358ff28f81c20a26d46975127cd2103c36.scope\": RecentStats: unable to find data in memory cache]" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.072763 4810 generic.go:334] "Generic (PLEG): container finished" podID="5ff87281-48cc-432c-8bb8-fe29bf27f7da" containerID="4e7d187b1cf44385f79cd4551e938ffcc4b2276b39a9b3d7f991bdd7dd262cb9" exitCode=0 Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.072853 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerDied","Data":"4e7d187b1cf44385f79cd4551e938ffcc4b2276b39a9b3d7f991bdd7dd262cb9"} Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.072889 4810 scope.go:117] "RemoveContainer" containerID="a5b7db755d1612899f7a556ce93d6dcec06e566827648d323c6808010588ee4b" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.073595 4810 scope.go:117] "RemoveContainer" 
containerID="4e7d187b1cf44385f79cd4551e938ffcc4b2276b39a9b3d7f991bdd7dd262cb9" Oct 09 00:32:35 crc kubenswrapper[4810]: E1009 00:32:35.074113 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs_service-telemetry(5ff87281-48cc-432c-8bb8-fe29bf27f7da)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" podUID="5ff87281-48cc-432c-8bb8-fe29bf27f7da" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.079647 4810 generic.go:334] "Generic (PLEG): container finished" podID="94243b4a-0039-4087-9a1e-14199e488715" containerID="f3e9dcfa47df8dd4f3e6cffbbef9a154158861d147912132907a1b15d7419638" exitCode=0 Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.079695 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerDied","Data":"f3e9dcfa47df8dd4f3e6cffbbef9a154158861d147912132907a1b15d7419638"} Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.080190 4810 scope.go:117] "RemoveContainer" containerID="f3e9dcfa47df8dd4f3e6cffbbef9a154158861d147912132907a1b15d7419638" Oct 09 00:32:35 crc kubenswrapper[4810]: E1009 00:32:35.080385 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2_service-telemetry(94243b4a-0039-4087-9a1e-14199e488715)\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" podUID="94243b4a-0039-4087-9a1e-14199e488715" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.081418 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" event={"ID":"5980be55-e943-49e1-b2c3-a08981372c6b","Type":"ContainerStarted","Data":"60bb91d15e5e8a8e8fbb5a6eea3b50c0b59e0f2f5e9e8240890e7fdfe955ad1b"} Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.081473 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" event={"ID":"5980be55-e943-49e1-b2c3-a08981372c6b","Type":"ContainerStarted","Data":"6fe2394c773cf8f3c53e2540f2b5e19146b1ef3a5ac51940982dc07809dabea0"} Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.084786 4810 generic.go:334] "Generic (PLEG): container finished" podID="1dce5a91-a66f-4936-9a04-05cd00df18e5" containerID="fcd06119d8010a23b22f890221b337358ff28f81c20a26d46975127cd2103c36" exitCode=0 Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.084851 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerDied","Data":"fcd06119d8010a23b22f890221b337358ff28f81c20a26d46975127cd2103c36"} Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.085433 4810 scope.go:117] "RemoveContainer" containerID="fcd06119d8010a23b22f890221b337358ff28f81c20a26d46975127cd2103c36" Oct 09 00:32:35 crc kubenswrapper[4810]: E1009 00:32:35.085632 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge 
pod=default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g_service-telemetry(1dce5a91-a66f-4936-9a04-05cd00df18e5)\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" podUID="1dce5a91-a66f-4936-9a04-05cd00df18e5" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.087149 4810 generic.go:334] "Generic (PLEG): container finished" podID="417484c8-987f-4ac7-b8ca-7de8c2dfc404" containerID="19e1f47d4284cffc71e59643f4362181c6044b13582931f3fb89f1970524a557" exitCode=0 Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.087187 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerDied","Data":"19e1f47d4284cffc71e59643f4362181c6044b13582931f3fb89f1970524a557"} Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.087585 4810 scope.go:117] "RemoveContainer" containerID="19e1f47d4284cffc71e59643f4362181c6044b13582931f3fb89f1970524a557" Oct 09 00:32:35 crc kubenswrapper[4810]: E1009 00:32:35.087783 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9_service-telemetry(417484c8-987f-4ac7-b8ca-7de8c2dfc404)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" podUID="417484c8-987f-4ac7-b8ca-7de8c2dfc404" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.122425 4810 scope.go:117] "RemoveContainer" containerID="6d92699b79ea21ef8e9282dbe3bea5d94e1d24d71e208d58ea098dff30726373" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.186002 4810 scope.go:117] "RemoveContainer" containerID="6e1a13cd2183514a2835eda6fc9aa1b0ce7e159d71861f106c556f29d93f0349" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.200452 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-6l6gg" podStartSLOduration=5.200433971 podStartE2EDuration="5.200433971s" podCreationTimestamp="2025-10-09 00:32:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 00:32:35.197002502 +0000 UTC m=+1552.722641203" watchObservedRunningTime="2025-10-09 00:32:35.200433971 +0000 UTC m=+1552.726072672" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.231362 4810 scope.go:117] "RemoveContainer" containerID="a9e707569022d6f724ef186602a88b57a9bbba584c3fb8b60aa77e526ba25eaf" Oct 09 00:32:35 crc kubenswrapper[4810]: I1009 00:32:35.262632 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a53e61eb-d1f3-4467-abe6-a25041614849" path="/var/lib/kubelet/pods/a53e61eb-d1f3-4467-abe6-a25041614849/volumes" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.101570 4810 generic.go:334] "Generic (PLEG): container finished" podID="d7a9475e-1b07-437e-9837-67c2a0b43887" containerID="06c379c10a67101422d57aab9c77e1048bb55de24f7d3fc8f797e29a00d4cafb" exitCode=0 Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.101637 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerDied","Data":"06c379c10a67101422d57aab9c77e1048bb55de24f7d3fc8f797e29a00d4cafb"} Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.101675 
4810 scope.go:117] "RemoveContainer" containerID="db2e12849cfec6dfbb3317bbcbe3c92801cdc0495f560ec1e7b831aa677f597a" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.102275 4810 scope.go:117] "RemoveContainer" containerID="06c379c10a67101422d57aab9c77e1048bb55de24f7d3fc8f797e29a00d4cafb" Oct 09 00:32:36 crc kubenswrapper[4810]: E1009 00:32:36.102500 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-5759b4d97-775pw_service-telemetry(d7a9475e-1b07-437e-9837-67c2a0b43887)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" podUID="d7a9475e-1b07-437e-9837-67c2a0b43887" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.292177 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.292978 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.296206 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.296373 4810 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.309225 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.370509 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7nkb\" (UniqueName: \"kubernetes.io/projected/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-kube-api-access-f7nkb\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.370664 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.370758 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-qdr-test-config\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.472175 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7nkb\" (UniqueName: \"kubernetes.io/projected/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-kube-api-access-f7nkb\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.472248 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " 
pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.472293 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-qdr-test-config\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.473048 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-qdr-test-config\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.481658 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.495436 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7nkb\" (UniqueName: \"kubernetes.io/projected/f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e-kube-api-access-f7nkb\") pod \"qdr-test\" (UID: \"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e\") " pod="service-telemetry/qdr-test" Oct 09 00:32:36 crc kubenswrapper[4810]: I1009 00:32:36.606850 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Oct 09 00:32:37 crc kubenswrapper[4810]: I1009 00:32:37.037464 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Oct 09 00:32:37 crc kubenswrapper[4810]: W1009 00:32:37.054025 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8dcc19d_9fcd_4979_91a5_7b1da2c74d9e.slice/crio-5124702b03a1360bd2918f08eeeb1ef8b0c844ad3c6f99cae44eb722cb1f35bf WatchSource:0}: Error finding container 5124702b03a1360bd2918f08eeeb1ef8b0c844ad3c6f99cae44eb722cb1f35bf: Status 404 returned error can't find the container with id 5124702b03a1360bd2918f08eeeb1ef8b0c844ad3c6f99cae44eb722cb1f35bf Oct 09 00:32:37 crc kubenswrapper[4810]: I1009 00:32:37.116729 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e","Type":"ContainerStarted","Data":"5124702b03a1360bd2918f08eeeb1ef8b0c844ad3c6f99cae44eb722cb1f35bf"} Oct 09 00:32:44 crc kubenswrapper[4810]: I1009 00:32:44.253988 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:32:44 crc kubenswrapper[4810]: E1009 00:32:44.254696 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.173518 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" 
event={"ID":"f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e","Type":"ContainerStarted","Data":"83adfddf6a104fcd4c7070a228e93154ab38e9d35f92caf861238dbd1d636bfc"} Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.191817 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.979778142 podStartE2EDuration="9.191799152s" podCreationTimestamp="2025-10-09 00:32:36 +0000 UTC" firstStartedPulling="2025-10-09 00:32:37.055566363 +0000 UTC m=+1554.581205074" lastFinishedPulling="2025-10-09 00:32:44.267587383 +0000 UTC m=+1561.793226084" observedRunningTime="2025-10-09 00:32:45.190058592 +0000 UTC m=+1562.715697293" watchObservedRunningTime="2025-10-09 00:32:45.191799152 +0000 UTC m=+1562.717437863" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.460095 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-h7q92"] Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.461305 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.466005 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.466544 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.467204 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.467343 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.467453 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.467664 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.468715 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-h7q92"] Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.508479 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-sensubility-config\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.508768 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-publisher\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.508805 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmx7z\" (UniqueName: 
\"kubernetes.io/projected/35debcab-8710-4907-a711-5fad15dc9c24-kube-api-access-nmx7z\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.508926 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-healthcheck-log\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.508965 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.508982 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-config\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.509028 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610097 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610198 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-sensubility-config\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610234 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-publisher\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610298 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmx7z\" (UniqueName: \"kubernetes.io/projected/35debcab-8710-4907-a711-5fad15dc9c24-kube-api-access-nmx7z\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " 
pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610343 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-healthcheck-log\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610409 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-config\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.610436 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.611075 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-sensubility-config\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.611244 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-publisher\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.611720 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.612930 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.612990 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-config\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.613265 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-healthcheck-log\") pod \"stf-smoketest-smoke1-h7q92\" (UID: 
\"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.634452 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmx7z\" (UniqueName: \"kubernetes.io/projected/35debcab-8710-4907-a711-5fad15dc9c24-kube-api-access-nmx7z\") pod \"stf-smoketest-smoke1-h7q92\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.777899 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.939044 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.940330 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Oct 09 00:32:45 crc kubenswrapper[4810]: I1009 00:32:45.945236 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Oct 09 00:32:46 crc kubenswrapper[4810]: I1009 00:32:46.016759 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7xfw\" (UniqueName: \"kubernetes.io/projected/37b84493-6adc-4977-a62a-dda5f0eb310a-kube-api-access-w7xfw\") pod \"curl\" (UID: \"37b84493-6adc-4977-a62a-dda5f0eb310a\") " pod="service-telemetry/curl" Oct 09 00:32:46 crc kubenswrapper[4810]: I1009 00:32:46.118507 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7xfw\" (UniqueName: \"kubernetes.io/projected/37b84493-6adc-4977-a62a-dda5f0eb310a-kube-api-access-w7xfw\") pod \"curl\" (UID: \"37b84493-6adc-4977-a62a-dda5f0eb310a\") " pod="service-telemetry/curl" Oct 09 00:32:46 crc kubenswrapper[4810]: I1009 00:32:46.135686 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7xfw\" (UniqueName: \"kubernetes.io/projected/37b84493-6adc-4977-a62a-dda5f0eb310a-kube-api-access-w7xfw\") pod \"curl\" (UID: \"37b84493-6adc-4977-a62a-dda5f0eb310a\") " pod="service-telemetry/curl" Oct 09 00:32:46 crc kubenswrapper[4810]: I1009 00:32:46.230696 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-h7q92"] Oct 09 00:32:46 crc kubenswrapper[4810]: W1009 00:32:46.235777 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35debcab_8710_4907_a711_5fad15dc9c24.slice/crio-989dd88a37e62471aeb3621a282a4eddee4e7437ee23cb8f14e24ebc2a6d25d3 WatchSource:0}: Error finding container 989dd88a37e62471aeb3621a282a4eddee4e7437ee23cb8f14e24ebc2a6d25d3: Status 404 returned error can't find the container with id 989dd88a37e62471aeb3621a282a4eddee4e7437ee23cb8f14e24ebc2a6d25d3 Oct 09 00:32:46 crc kubenswrapper[4810]: I1009 00:32:46.262959 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Oct 09 00:32:46 crc kubenswrapper[4810]: I1009 00:32:46.703713 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Oct 09 00:32:47 crc kubenswrapper[4810]: I1009 00:32:47.189529 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-h7q92" event={"ID":"35debcab-8710-4907-a711-5fad15dc9c24","Type":"ContainerStarted","Data":"989dd88a37e62471aeb3621a282a4eddee4e7437ee23cb8f14e24ebc2a6d25d3"} Oct 09 00:32:47 crc kubenswrapper[4810]: I1009 00:32:47.191336 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"37b84493-6adc-4977-a62a-dda5f0eb310a","Type":"ContainerStarted","Data":"e08ad8c0b088b69e390ac4bdbca5ae1e81b6f5a76360de491aa1f7cde072a6be"} Oct 09 00:32:48 crc kubenswrapper[4810]: I1009 00:32:48.257703 4810 scope.go:117] "RemoveContainer" containerID="19e1f47d4284cffc71e59643f4362181c6044b13582931f3fb89f1970524a557" Oct 09 00:32:48 crc kubenswrapper[4810]: I1009 00:32:48.258729 4810 scope.go:117] "RemoveContainer" containerID="f3e9dcfa47df8dd4f3e6cffbbef9a154158861d147912132907a1b15d7419638" Oct 09 00:32:48 crc kubenswrapper[4810]: I1009 00:32:48.259154 4810 scope.go:117] "RemoveContainer" containerID="4e7d187b1cf44385f79cd4551e938ffcc4b2276b39a9b3d7f991bdd7dd262cb9" Oct 09 00:32:49 crc kubenswrapper[4810]: I1009 00:32:49.253409 4810 scope.go:117] "RemoveContainer" containerID="fcd06119d8010a23b22f890221b337358ff28f81c20a26d46975127cd2103c36" Oct 09 00:32:50 crc kubenswrapper[4810]: I1009 00:32:50.253630 4810 scope.go:117] "RemoveContainer" containerID="06c379c10a67101422d57aab9c77e1048bb55de24f7d3fc8f797e29a00d4cafb" Oct 09 00:32:53 crc kubenswrapper[4810]: I1009 00:32:53.240928 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9" event={"ID":"417484c8-987f-4ac7-b8ca-7de8c2dfc404","Type":"ContainerStarted","Data":"a91828a04fda6593d2c84fba21d0487d0f6add7da230e2595d5178f294c5f3a2"} Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.254239 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:32:59 crc kubenswrapper[4810]: E1009 00:32:59.255133 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.295270 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-h7q92" event={"ID":"35debcab-8710-4907-a711-5fad15dc9c24","Type":"ContainerStarted","Data":"10aa5b7fa55171d4c451d2629e5337c018cfc9a5c4a8ca4d40d85194aa92efc3"} Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.297356 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs" event={"ID":"5ff87281-48cc-432c-8bb8-fe29bf27f7da","Type":"ContainerStarted","Data":"cda49ba90e553924bcfef76f15f860b25d71fa4e4d4c78a2cb5fb5122899c185"} Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.300593 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2" event={"ID":"94243b4a-0039-4087-9a1e-14199e488715","Type":"ContainerStarted","Data":"c360b4e0a7ceb8d0e24ebe19400234740fce2fcb5c4cd2c5168b927228c2eea2"} Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.309072 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g" event={"ID":"1dce5a91-a66f-4936-9a04-05cd00df18e5","Type":"ContainerStarted","Data":"efededd6ddf9e366a6861103b5cdc5e8fa4fe0cb4341f1c4906ee5fcaa043e4d"} Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.312024 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-775pw" event={"ID":"d7a9475e-1b07-437e-9837-67c2a0b43887","Type":"ContainerStarted","Data":"adc655ea1d2505a7cdf8bb73437954aa68e2f904d4ea29c30a9f6aa823103066"} Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.317877 4810 generic.go:334] "Generic (PLEG): container finished" podID="37b84493-6adc-4977-a62a-dda5f0eb310a" containerID="2147cf978887269ffa1283402021e23458ba7a86b98fb1f0b187fdde2380d77d" exitCode=0 Oct 09 00:32:59 crc kubenswrapper[4810]: I1009 00:32:59.317927 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"37b84493-6adc-4977-a62a-dda5f0eb310a","Type":"ContainerDied","Data":"2147cf978887269ffa1283402021e23458ba7a86b98fb1f0b187fdde2380d77d"} Oct 09 00:33:00 crc kubenswrapper[4810]: I1009 00:33:00.605710 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Oct 09 00:33:00 crc kubenswrapper[4810]: I1009 00:33:00.637905 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7xfw\" (UniqueName: \"kubernetes.io/projected/37b84493-6adc-4977-a62a-dda5f0eb310a-kube-api-access-w7xfw\") pod \"37b84493-6adc-4977-a62a-dda5f0eb310a\" (UID: \"37b84493-6adc-4977-a62a-dda5f0eb310a\") " Oct 09 00:33:00 crc kubenswrapper[4810]: I1009 00:33:00.656920 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37b84493-6adc-4977-a62a-dda5f0eb310a-kube-api-access-w7xfw" (OuterVolumeSpecName: "kube-api-access-w7xfw") pod "37b84493-6adc-4977-a62a-dda5f0eb310a" (UID: "37b84493-6adc-4977-a62a-dda5f0eb310a"). InnerVolumeSpecName "kube-api-access-w7xfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:33:00 crc kubenswrapper[4810]: I1009 00:33:00.739884 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7xfw\" (UniqueName: \"kubernetes.io/projected/37b84493-6adc-4977-a62a-dda5f0eb310a-kube-api-access-w7xfw\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:00 crc kubenswrapper[4810]: I1009 00:33:00.766070 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_37b84493-6adc-4977-a62a-dda5f0eb310a/curl/0.log" Oct 09 00:33:01 crc kubenswrapper[4810]: I1009 00:33:01.035436 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-g867m_34160a9e-a9d8-45f2-be37-889dfbe283a5/prometheus-webhook-snmp/0.log" Oct 09 00:33:01 crc kubenswrapper[4810]: I1009 00:33:01.331972 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"37b84493-6adc-4977-a62a-dda5f0eb310a","Type":"ContainerDied","Data":"e08ad8c0b088b69e390ac4bdbca5ae1e81b6f5a76360de491aa1f7cde072a6be"} Oct 09 00:33:01 crc kubenswrapper[4810]: I1009 00:33:01.332011 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e08ad8c0b088b69e390ac4bdbca5ae1e81b6f5a76360de491aa1f7cde072a6be" Oct 09 00:33:01 crc kubenswrapper[4810]: I1009 00:33:01.332030 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Oct 09 00:33:06 crc kubenswrapper[4810]: I1009 00:33:06.385909 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-h7q92" event={"ID":"35debcab-8710-4907-a711-5fad15dc9c24","Type":"ContainerStarted","Data":"3d0ccdc9f4cf21dc8f957cc66081b9a9aa89746087e64ab770483008f535e360"} Oct 09 00:33:06 crc kubenswrapper[4810]: I1009 00:33:06.401855 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-h7q92" podStartSLOduration=2.259711355 podStartE2EDuration="21.401813696s" podCreationTimestamp="2025-10-09 00:32:45 +0000 UTC" firstStartedPulling="2025-10-09 00:32:46.237684948 +0000 UTC m=+1563.763323649" lastFinishedPulling="2025-10-09 00:33:05.379787289 +0000 UTC m=+1582.905425990" observedRunningTime="2025-10-09 00:33:06.399308004 +0000 UTC m=+1583.924946735" watchObservedRunningTime="2025-10-09 00:33:06.401813696 +0000 UTC m=+1583.927452427" Oct 09 00:33:10 crc kubenswrapper[4810]: I1009 00:33:10.254565 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:33:10 crc kubenswrapper[4810]: E1009 00:33:10.255859 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:33:21 crc kubenswrapper[4810]: I1009 00:33:21.254478 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:33:21 crc kubenswrapper[4810]: E1009 00:33:21.255656 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:33:31 crc kubenswrapper[4810]: I1009 00:33:31.202113 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-g867m_34160a9e-a9d8-45f2-be37-889dfbe283a5/prometheus-webhook-snmp/0.log" Oct 09 00:33:32 crc kubenswrapper[4810]: I1009 00:33:32.617810 4810 generic.go:334] "Generic (PLEG): container finished" podID="35debcab-8710-4907-a711-5fad15dc9c24" containerID="10aa5b7fa55171d4c451d2629e5337c018cfc9a5c4a8ca4d40d85194aa92efc3" exitCode=0 Oct 09 00:33:32 crc kubenswrapper[4810]: I1009 00:33:32.617868 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-h7q92" event={"ID":"35debcab-8710-4907-a711-5fad15dc9c24","Type":"ContainerDied","Data":"10aa5b7fa55171d4c451d2629e5337c018cfc9a5c4a8ca4d40d85194aa92efc3"} Oct 09 00:33:32 crc kubenswrapper[4810]: I1009 00:33:32.618586 4810 scope.go:117] "RemoveContainer" containerID="10aa5b7fa55171d4c451d2629e5337c018cfc9a5c4a8ca4d40d85194aa92efc3" Oct 09 00:33:33 crc kubenswrapper[4810]: I1009 00:33:33.263689 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:33:33 crc kubenswrapper[4810]: E1009 00:33:33.264231 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:33:37 crc kubenswrapper[4810]: I1009 00:33:37.666281 4810 generic.go:334] "Generic (PLEG): container finished" podID="35debcab-8710-4907-a711-5fad15dc9c24" containerID="3d0ccdc9f4cf21dc8f957cc66081b9a9aa89746087e64ab770483008f535e360" exitCode=0 Oct 09 00:33:37 crc kubenswrapper[4810]: I1009 00:33:37.666341 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-h7q92" event={"ID":"35debcab-8710-4907-a711-5fad15dc9c24","Type":"ContainerDied","Data":"3d0ccdc9f4cf21dc8f957cc66081b9a9aa89746087e64ab770483008f535e360"} Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.010074 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.124808 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-publisher\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.124924 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-entrypoint-script\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.124967 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-entrypoint-script\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.124991 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmx7z\" (UniqueName: \"kubernetes.io/projected/35debcab-8710-4907-a711-5fad15dc9c24-kube-api-access-nmx7z\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.125010 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-sensubility-config\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.125069 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-healthcheck-log\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.125089 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-config\") pod \"35debcab-8710-4907-a711-5fad15dc9c24\" (UID: \"35debcab-8710-4907-a711-5fad15dc9c24\") " Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.130945 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35debcab-8710-4907-a711-5fad15dc9c24-kube-api-access-nmx7z" (OuterVolumeSpecName: "kube-api-access-nmx7z") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "kube-api-access-nmx7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.144806 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "collectd-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.146026 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.147692 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.154325 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.157918 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.165706 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "35debcab-8710-4907-a711-5fad15dc9c24" (UID: "35debcab-8710-4907-a711-5fad15dc9c24"). InnerVolumeSpecName "ceilometer-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.226986 4810 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-sensubility-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.227041 4810 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-healthcheck-log\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.227060 4810 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-config\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.227077 4810 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.227100 4810 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.227118 4810 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/35debcab-8710-4907-a711-5fad15dc9c24-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.227136 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmx7z\" (UniqueName: \"kubernetes.io/projected/35debcab-8710-4907-a711-5fad15dc9c24-kube-api-access-nmx7z\") on node \"crc\" DevicePath \"\"" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.687019 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-h7q92" event={"ID":"35debcab-8710-4907-a711-5fad15dc9c24","Type":"ContainerDied","Data":"989dd88a37e62471aeb3621a282a4eddee4e7437ee23cb8f14e24ebc2a6d25d3"} Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.687078 4810 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="989dd88a37e62471aeb3621a282a4eddee4e7437ee23cb8f14e24ebc2a6d25d3" Oct 09 00:33:39 crc kubenswrapper[4810]: I1009 00:33:39.687086 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-h7q92" Oct 09 00:33:41 crc kubenswrapper[4810]: I1009 00:33:41.129505 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-h7q92_35debcab-8710-4907-a711-5fad15dc9c24/smoketest-collectd/0.log" Oct 09 00:33:41 crc kubenswrapper[4810]: I1009 00:33:41.438651 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-h7q92_35debcab-8710-4907-a711-5fad15dc9c24/smoketest-ceilometer/0.log" Oct 09 00:33:41 crc kubenswrapper[4810]: I1009 00:33:41.832399 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-6l6gg_5980be55-e943-49e1-b2c3-a08981372c6b/default-interconnect/0.log" Oct 09 00:33:42 crc kubenswrapper[4810]: I1009 00:33:42.130642 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2_94243b4a-0039-4087-9a1e-14199e488715/bridge/2.log" Oct 09 00:33:42 crc kubenswrapper[4810]: I1009 00:33:42.435345 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-9q9g2_94243b4a-0039-4087-9a1e-14199e488715/sg-core/0.log" Oct 09 00:33:42 crc kubenswrapper[4810]: I1009 00:33:42.708771 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs_5ff87281-48cc-432c-8bb8-fe29bf27f7da/bridge/2.log" Oct 09 00:33:43 crc kubenswrapper[4810]: I1009 00:33:43.006391 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-66f4b64b59-fswqs_5ff87281-48cc-432c-8bb8-fe29bf27f7da/sg-core/0.log" Oct 09 00:33:43 crc kubenswrapper[4810]: I1009 00:33:43.317247 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g_1dce5a91-a66f-4936-9a04-05cd00df18e5/bridge/2.log" Oct 09 00:33:43 crc kubenswrapper[4810]: I1009 00:33:43.639430 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-pg22g_1dce5a91-a66f-4936-9a04-05cd00df18e5/sg-core/0.log" Oct 09 00:33:43 crc kubenswrapper[4810]: I1009 00:33:43.962617 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9_417484c8-987f-4ac7-b8ca-7de8c2dfc404/bridge/2.log" Oct 09 00:33:44 crc kubenswrapper[4810]: I1009 00:33:44.312603 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-7cd45fb898-q86g9_417484c8-987f-4ac7-b8ca-7de8c2dfc404/sg-core/0.log" Oct 09 00:33:44 crc kubenswrapper[4810]: I1009 00:33:44.612337 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-775pw_d7a9475e-1b07-437e-9837-67c2a0b43887/bridge/2.log" Oct 09 00:33:44 crc kubenswrapper[4810]: I1009 00:33:44.915622 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-775pw_d7a9475e-1b07-437e-9837-67c2a0b43887/sg-core/0.log" Oct 09 00:33:48 crc kubenswrapper[4810]: I1009 00:33:48.253941 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:33:48 crc kubenswrapper[4810]: E1009 
00:33:48.254751 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:33:48 crc kubenswrapper[4810]: I1009 00:33:48.304154 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-55b56d7ff-xqq2r_09e03be9-1c87-4bbc-92ac-5f1de2289095/operator/0.log" Oct 09 00:33:48 crc kubenswrapper[4810]: I1009 00:33:48.614881 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_f9020904-a94e-479d-a497-75a587aed860/prometheus/0.log" Oct 09 00:33:48 crc kubenswrapper[4810]: I1009 00:33:48.959609 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_da88a141-8779-4b30-9ea7-3477d4ad9ee5/elasticsearch/0.log" Oct 09 00:33:49 crc kubenswrapper[4810]: I1009 00:33:49.255542 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-g867m_34160a9e-a9d8-45f2-be37-889dfbe283a5/prometheus-webhook-snmp/0.log" Oct 09 00:33:49 crc kubenswrapper[4810]: I1009 00:33:49.580496 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_f9601525-d2da-4890-a464-42f2c4a7f0f0/alertmanager/0.log" Oct 09 00:34:02 crc kubenswrapper[4810]: I1009 00:34:02.254171 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:34:02 crc kubenswrapper[4810]: E1009 00:34:02.254868 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:34:06 crc kubenswrapper[4810]: I1009 00:34:06.363215 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-99856677b-nt8cc_1278b5d5-8f0f-4b50-9984-6e2bdbea7480/operator/0.log" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.313266 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sll58"] Oct 09 00:34:08 crc kubenswrapper[4810]: E1009 00:34:08.313913 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37b84493-6adc-4977-a62a-dda5f0eb310a" containerName="curl" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.313932 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="37b84493-6adc-4977-a62a-dda5f0eb310a" containerName="curl" Oct 09 00:34:08 crc kubenswrapper[4810]: E1009 00:34:08.313958 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35debcab-8710-4907-a711-5fad15dc9c24" containerName="smoketest-ceilometer" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.313967 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="35debcab-8710-4907-a711-5fad15dc9c24" containerName="smoketest-ceilometer" Oct 09 00:34:08 crc kubenswrapper[4810]: E1009 00:34:08.313983 4810 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35debcab-8710-4907-a711-5fad15dc9c24" containerName="smoketest-collectd" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.313994 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="35debcab-8710-4907-a711-5fad15dc9c24" containerName="smoketest-collectd" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.314144 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="37b84493-6adc-4977-a62a-dda5f0eb310a" containerName="curl" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.314162 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="35debcab-8710-4907-a711-5fad15dc9c24" containerName="smoketest-collectd" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.314179 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="35debcab-8710-4907-a711-5fad15dc9c24" containerName="smoketest-ceilometer" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.318780 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.347967 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sll58"] Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.452649 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzlnw\" (UniqueName: \"kubernetes.io/projected/800f594c-ea98-4419-8f92-6c0e75e86709-kube-api-access-wzlnw\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.452702 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-catalog-content\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.452744 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-utilities\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.554261 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzlnw\" (UniqueName: \"kubernetes.io/projected/800f594c-ea98-4419-8f92-6c0e75e86709-kube-api-access-wzlnw\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.554303 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-catalog-content\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.554333 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-utilities\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.554783 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-utilities\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.554992 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-catalog-content\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.589805 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzlnw\" (UniqueName: \"kubernetes.io/projected/800f594c-ea98-4419-8f92-6c0e75e86709-kube-api-access-wzlnw\") pod \"certified-operators-sll58\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:08 crc kubenswrapper[4810]: I1009 00:34:08.640021 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:09 crc kubenswrapper[4810]: I1009 00:34:09.127674 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sll58"] Oct 09 00:34:09 crc kubenswrapper[4810]: I1009 00:34:09.906188 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-55b56d7ff-xqq2r_09e03be9-1c87-4bbc-92ac-5f1de2289095/operator/0.log" Oct 09 00:34:09 crc kubenswrapper[4810]: I1009 00:34:09.966421 4810 generic.go:334] "Generic (PLEG): container finished" podID="800f594c-ea98-4419-8f92-6c0e75e86709" containerID="cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726" exitCode=0 Oct 09 00:34:09 crc kubenswrapper[4810]: I1009 00:34:09.966466 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerDied","Data":"cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726"} Oct 09 00:34:09 crc kubenswrapper[4810]: I1009 00:34:09.966493 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerStarted","Data":"43bab295b063841a9a571e1fadeed7288a0b8aff9528ecc38f732a8516ad02aa"} Oct 09 00:34:10 crc kubenswrapper[4810]: I1009 00:34:10.205668 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_f8dcc19d-9fcd-4979-91a5-7b1da2c74d9e/qdr/0.log" Oct 09 00:34:10 crc kubenswrapper[4810]: I1009 00:34:10.975025 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerStarted","Data":"8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00"} Oct 09 00:34:11 crc kubenswrapper[4810]: I1009 00:34:11.988973 4810 generic.go:334] "Generic (PLEG): container finished" 
podID="800f594c-ea98-4419-8f92-6c0e75e86709" containerID="8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00" exitCode=0 Oct 09 00:34:11 crc kubenswrapper[4810]: I1009 00:34:11.989061 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerDied","Data":"8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00"} Oct 09 00:34:12 crc kubenswrapper[4810]: I1009 00:34:12.998459 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerStarted","Data":"47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c"} Oct 09 00:34:13 crc kubenswrapper[4810]: I1009 00:34:13.031823 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sll58" podStartSLOduration=2.584719347 podStartE2EDuration="5.031801409s" podCreationTimestamp="2025-10-09 00:34:08 +0000 UTC" firstStartedPulling="2025-10-09 00:34:09.967786884 +0000 UTC m=+1647.493425605" lastFinishedPulling="2025-10-09 00:34:12.414868976 +0000 UTC m=+1649.940507667" observedRunningTime="2025-10-09 00:34:13.025930982 +0000 UTC m=+1650.551569693" watchObservedRunningTime="2025-10-09 00:34:13.031801409 +0000 UTC m=+1650.557440120" Oct 09 00:34:17 crc kubenswrapper[4810]: I1009 00:34:17.254013 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:34:17 crc kubenswrapper[4810]: E1009 00:34:17.256247 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:34:18 crc kubenswrapper[4810]: I1009 00:34:18.640883 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:18 crc kubenswrapper[4810]: I1009 00:34:18.640982 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:18 crc kubenswrapper[4810]: I1009 00:34:18.711082 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:19 crc kubenswrapper[4810]: I1009 00:34:19.119407 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:19 crc kubenswrapper[4810]: I1009 00:34:19.179646 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sll58"] Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.061292 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sll58" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="registry-server" containerID="cri-o://47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c" gracePeriod=2 Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.492587 4810 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.658815 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-utilities\") pod \"800f594c-ea98-4419-8f92-6c0e75e86709\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.659681 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-utilities" (OuterVolumeSpecName: "utilities") pod "800f594c-ea98-4419-8f92-6c0e75e86709" (UID: "800f594c-ea98-4419-8f92-6c0e75e86709"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.659774 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-catalog-content\") pod \"800f594c-ea98-4419-8f92-6c0e75e86709\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.659922 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzlnw\" (UniqueName: \"kubernetes.io/projected/800f594c-ea98-4419-8f92-6c0e75e86709-kube-api-access-wzlnw\") pod \"800f594c-ea98-4419-8f92-6c0e75e86709\" (UID: \"800f594c-ea98-4419-8f92-6c0e75e86709\") " Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.660808 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.674189 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/800f594c-ea98-4419-8f92-6c0e75e86709-kube-api-access-wzlnw" (OuterVolumeSpecName: "kube-api-access-wzlnw") pod "800f594c-ea98-4419-8f92-6c0e75e86709" (UID: "800f594c-ea98-4419-8f92-6c0e75e86709"). InnerVolumeSpecName "kube-api-access-wzlnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.722727 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "800f594c-ea98-4419-8f92-6c0e75e86709" (UID: "800f594c-ea98-4419-8f92-6c0e75e86709"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.761948 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzlnw\" (UniqueName: \"kubernetes.io/projected/800f594c-ea98-4419-8f92-6c0e75e86709-kube-api-access-wzlnw\") on node \"crc\" DevicePath \"\"" Oct 09 00:34:21 crc kubenswrapper[4810]: I1009 00:34:21.762010 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/800f594c-ea98-4419-8f92-6c0e75e86709-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.075323 4810 generic.go:334] "Generic (PLEG): container finished" podID="800f594c-ea98-4419-8f92-6c0e75e86709" containerID="47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c" exitCode=0 Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.075383 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerDied","Data":"47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c"} Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.075446 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sll58" event={"ID":"800f594c-ea98-4419-8f92-6c0e75e86709","Type":"ContainerDied","Data":"43bab295b063841a9a571e1fadeed7288a0b8aff9528ecc38f732a8516ad02aa"} Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.075441 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sll58" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.075465 4810 scope.go:117] "RemoveContainer" containerID="47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.108939 4810 scope.go:117] "RemoveContainer" containerID="8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.127017 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sll58"] Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.138759 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sll58"] Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.146417 4810 scope.go:117] "RemoveContainer" containerID="cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.183847 4810 scope.go:117] "RemoveContainer" containerID="47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c" Oct 09 00:34:22 crc kubenswrapper[4810]: E1009 00:34:22.184405 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c\": container with ID starting with 47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c not found: ID does not exist" containerID="47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.184440 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c"} err="failed to get container status 
\"47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c\": rpc error: code = NotFound desc = could not find container \"47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c\": container with ID starting with 47470c16b0e3d86f9058abd7de273c195fad8b8dbe60ed52d0c1431be232db3c not found: ID does not exist" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.184463 4810 scope.go:117] "RemoveContainer" containerID="8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00" Oct 09 00:34:22 crc kubenswrapper[4810]: E1009 00:34:22.184977 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00\": container with ID starting with 8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00 not found: ID does not exist" containerID="8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.185021 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00"} err="failed to get container status \"8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00\": rpc error: code = NotFound desc = could not find container \"8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00\": container with ID starting with 8234bc73dfccbda7cb6eb95e16bb058d9b872527f7105c96217d7705d98e6f00 not found: ID does not exist" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.185057 4810 scope.go:117] "RemoveContainer" containerID="cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726" Oct 09 00:34:22 crc kubenswrapper[4810]: E1009 00:34:22.185494 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726\": container with ID starting with cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726 not found: ID does not exist" containerID="cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726" Oct 09 00:34:22 crc kubenswrapper[4810]: I1009 00:34:22.185577 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726"} err="failed to get container status \"cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726\": rpc error: code = NotFound desc = could not find container \"cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726\": container with ID starting with cd46508bb66484c6bd432d4a84dc1b0cc23a17b48b4a653ca5c939938e857726 not found: ID does not exist" Oct 09 00:34:23 crc kubenswrapper[4810]: I1009 00:34:23.268068 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" path="/var/lib/kubelet/pods/800f594c-ea98-4419-8f92-6c0e75e86709/volumes" Oct 09 00:34:29 crc kubenswrapper[4810]: I1009 00:34:29.254813 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:34:29 crc kubenswrapper[4810]: E1009 00:34:29.256070 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:34:40 crc kubenswrapper[4810]: I1009 00:34:40.254160 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:34:40 crc kubenswrapper[4810]: E1009 00:34:40.255691 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.871561 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-s5ll7/must-gather-q6hd6"] Oct 09 00:34:48 crc kubenswrapper[4810]: E1009 00:34:48.872404 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="registry-server" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.872420 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="registry-server" Oct 09 00:34:48 crc kubenswrapper[4810]: E1009 00:34:48.872448 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="extract-content" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.872456 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="extract-content" Oct 09 00:34:48 crc kubenswrapper[4810]: E1009 00:34:48.872487 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="extract-utilities" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.872499 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="extract-utilities" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.872630 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="800f594c-ea98-4419-8f92-6c0e75e86709" containerName="registry-server" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.874492 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.890805 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-s5ll7"/"openshift-service-ca.crt" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.890946 4810 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-s5ll7"/"kube-root-ca.crt" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.914569 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-s5ll7/must-gather-q6hd6"] Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.950646 4810 scope.go:117] "RemoveContainer" containerID="4e8d58a44d45850c9c745d7a022289ab8eeb4a2fbe3e81ef2bc57f8564d9d432" Oct 09 00:34:48 crc kubenswrapper[4810]: I1009 00:34:48.992800 4810 scope.go:117] "RemoveContainer" containerID="8c2e9b3c8c1082617e1ec64b689149cf307672210b08c1c3446d24e913d31e86" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.012838 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9d70225-59f8-4b83-81b1-7fc68a58e740-must-gather-output\") pod \"must-gather-q6hd6\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.013186 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z2qh\" (UniqueName: \"kubernetes.io/projected/d9d70225-59f8-4b83-81b1-7fc68a58e740-kube-api-access-7z2qh\") pod \"must-gather-q6hd6\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.024012 4810 scope.go:117] "RemoveContainer" containerID="5d04ab197d1af30672995d00d7187a3f0313cb99e1c6b77e898566c0f981cc73" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.052832 4810 scope.go:117] "RemoveContainer" containerID="cd743d278c205e518a8a189dd4232b7e51f2555db58e5370ad453076c44ed58c" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.114900 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9d70225-59f8-4b83-81b1-7fc68a58e740-must-gather-output\") pod \"must-gather-q6hd6\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.114972 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z2qh\" (UniqueName: \"kubernetes.io/projected/d9d70225-59f8-4b83-81b1-7fc68a58e740-kube-api-access-7z2qh\") pod \"must-gather-q6hd6\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.115318 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9d70225-59f8-4b83-81b1-7fc68a58e740-must-gather-output\") pod \"must-gather-q6hd6\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.149566 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z2qh\" (UniqueName: 
\"kubernetes.io/projected/d9d70225-59f8-4b83-81b1-7fc68a58e740-kube-api-access-7z2qh\") pod \"must-gather-q6hd6\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:49 crc kubenswrapper[4810]: I1009 00:34:49.213651 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:34:50 crc kubenswrapper[4810]: I1009 00:34:49.506971 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-s5ll7/must-gather-q6hd6"] Oct 09 00:34:50 crc kubenswrapper[4810]: I1009 00:34:50.325669 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" event={"ID":"d9d70225-59f8-4b83-81b1-7fc68a58e740","Type":"ContainerStarted","Data":"a9f9dc699bf0932eb81cd2ec615febd4459681b76db6431d606f3b3adb91ccb3"} Oct 09 00:34:52 crc kubenswrapper[4810]: I1009 00:34:52.255848 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:34:52 crc kubenswrapper[4810]: E1009 00:34:52.267015 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:34:57 crc kubenswrapper[4810]: I1009 00:34:57.374986 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" event={"ID":"d9d70225-59f8-4b83-81b1-7fc68a58e740","Type":"ContainerStarted","Data":"0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97"} Oct 09 00:34:57 crc kubenswrapper[4810]: I1009 00:34:57.376781 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" event={"ID":"d9d70225-59f8-4b83-81b1-7fc68a58e740","Type":"ContainerStarted","Data":"f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706"} Oct 09 00:34:57 crc kubenswrapper[4810]: I1009 00:34:57.398802 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" podStartSLOduration=2.424645115 podStartE2EDuration="9.398785871s" podCreationTimestamp="2025-10-09 00:34:48 +0000 UTC" firstStartedPulling="2025-10-09 00:34:49.52026655 +0000 UTC m=+1687.045905251" lastFinishedPulling="2025-10-09 00:34:56.494407306 +0000 UTC m=+1694.020046007" observedRunningTime="2025-10-09 00:34:57.398479862 +0000 UTC m=+1694.924118593" watchObservedRunningTime="2025-10-09 00:34:57.398785871 +0000 UTC m=+1694.924424592" Oct 09 00:35:06 crc kubenswrapper[4810]: I1009 00:35:06.254648 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:35:06 crc kubenswrapper[4810]: E1009 00:35:06.255603 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:35:18 crc 
kubenswrapper[4810]: I1009 00:35:18.253804 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:35:18 crc kubenswrapper[4810]: E1009 00:35:18.254536 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:35:29 crc kubenswrapper[4810]: I1009 00:35:29.255101 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:35:29 crc kubenswrapper[4810]: E1009 00:35:29.257666 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:35:37 crc kubenswrapper[4810]: I1009 00:35:37.585039 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-jp7tg_456f2042-395a-424b-80c1-3bc40286210d/control-plane-machine-set-operator/0.log" Oct 09 00:35:37 crc kubenswrapper[4810]: I1009 00:35:37.718267 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-5jswd_690a8646-257c-49be-b693-e151a1bab532/kube-rbac-proxy/0.log" Oct 09 00:35:37 crc kubenswrapper[4810]: I1009 00:35:37.749690 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-5jswd_690a8646-257c-49be-b693-e151a1bab532/machine-api-operator/0.log" Oct 09 00:35:41 crc kubenswrapper[4810]: I1009 00:35:41.254399 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:35:41 crc kubenswrapper[4810]: E1009 00:35:41.254798 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.426025 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-mqgg9"] Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.428523 4810 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.451899 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-mqgg9"] Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.523708 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr45k\" (UniqueName: \"kubernetes.io/projected/cf02154c-1275-4014-994b-9ab50036a978-kube-api-access-kr45k\") pod \"infrawatch-operators-mqgg9\" (UID: \"cf02154c-1275-4014-994b-9ab50036a978\") " pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.625103 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr45k\" (UniqueName: \"kubernetes.io/projected/cf02154c-1275-4014-994b-9ab50036a978-kube-api-access-kr45k\") pod \"infrawatch-operators-mqgg9\" (UID: \"cf02154c-1275-4014-994b-9ab50036a978\") " pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.653648 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr45k\" (UniqueName: \"kubernetes.io/projected/cf02154c-1275-4014-994b-9ab50036a978-kube-api-access-kr45k\") pod \"infrawatch-operators-mqgg9\" (UID: \"cf02154c-1275-4014-994b-9ab50036a978\") " pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:44 crc kubenswrapper[4810]: I1009 00:35:44.749689 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:45 crc kubenswrapper[4810]: I1009 00:35:45.214174 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-mqgg9"] Oct 09 00:35:45 crc kubenswrapper[4810]: I1009 00:35:45.227284 4810 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 00:35:45 crc kubenswrapper[4810]: I1009 00:35:45.746601 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-mqgg9" event={"ID":"cf02154c-1275-4014-994b-9ab50036a978","Type":"ContainerStarted","Data":"45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0"} Oct 09 00:35:45 crc kubenswrapper[4810]: I1009 00:35:45.747044 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-mqgg9" event={"ID":"cf02154c-1275-4014-994b-9ab50036a978","Type":"ContainerStarted","Data":"334cbff807e4af8fdebb1d782c1c3584076a82595d2316711a243645800abefb"} Oct 09 00:35:45 crc kubenswrapper[4810]: I1009 00:35:45.762186 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-mqgg9" podStartSLOduration=1.6493634419999998 podStartE2EDuration="1.762163413s" podCreationTimestamp="2025-10-09 00:35:44 +0000 UTC" firstStartedPulling="2025-10-09 00:35:45.227025548 +0000 UTC m=+1742.752664269" lastFinishedPulling="2025-10-09 00:35:45.339825539 +0000 UTC m=+1742.865464240" observedRunningTime="2025-10-09 00:35:45.758603491 +0000 UTC m=+1743.284242192" watchObservedRunningTime="2025-10-09 00:35:45.762163413 +0000 UTC m=+1743.287802104" Oct 09 00:35:49 crc kubenswrapper[4810]: I1009 00:35:49.737845 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-7d4cc89fcb-ld9xm_7f31df7d-685a-472a-b6e5-8a31e96a9897/cert-manager-controller/0.log" Oct 09 00:35:49 crc 
kubenswrapper[4810]: I1009 00:35:49.808353 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7d9f95dbf-ts9ws_30b302fa-b791-4750-91f8-e6ef6898fd08/cert-manager-cainjector/0.log" Oct 09 00:35:49 crc kubenswrapper[4810]: I1009 00:35:49.890422 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-d969966f-pvdjv_7650fe90-e7bd-44ff-9eb1-92299bae19be/cert-manager-webhook/0.log" Oct 09 00:35:52 crc kubenswrapper[4810]: I1009 00:35:52.270275 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:35:52 crc kubenswrapper[4810]: E1009 00:35:52.271146 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:35:54 crc kubenswrapper[4810]: I1009 00:35:54.749982 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:54 crc kubenswrapper[4810]: I1009 00:35:54.750613 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:54 crc kubenswrapper[4810]: I1009 00:35:54.785159 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:54 crc kubenswrapper[4810]: I1009 00:35:54.843628 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:55 crc kubenswrapper[4810]: I1009 00:35:55.013698 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-mqgg9"] Oct 09 00:35:56 crc kubenswrapper[4810]: I1009 00:35:56.849335 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-mqgg9" podUID="cf02154c-1275-4014-994b-9ab50036a978" containerName="registry-server" containerID="cri-o://45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0" gracePeriod=2 Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.223060 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.400882 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr45k\" (UniqueName: \"kubernetes.io/projected/cf02154c-1275-4014-994b-9ab50036a978-kube-api-access-kr45k\") pod \"cf02154c-1275-4014-994b-9ab50036a978\" (UID: \"cf02154c-1275-4014-994b-9ab50036a978\") " Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.405385 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf02154c-1275-4014-994b-9ab50036a978-kube-api-access-kr45k" (OuterVolumeSpecName: "kube-api-access-kr45k") pod "cf02154c-1275-4014-994b-9ab50036a978" (UID: "cf02154c-1275-4014-994b-9ab50036a978"). InnerVolumeSpecName "kube-api-access-kr45k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.502615 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr45k\" (UniqueName: \"kubernetes.io/projected/cf02154c-1275-4014-994b-9ab50036a978-kube-api-access-kr45k\") on node \"crc\" DevicePath \"\"" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.859632 4810 generic.go:334] "Generic (PLEG): container finished" podID="cf02154c-1275-4014-994b-9ab50036a978" containerID="45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0" exitCode=0 Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.859681 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-mqgg9" event={"ID":"cf02154c-1275-4014-994b-9ab50036a978","Type":"ContainerDied","Data":"45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0"} Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.859708 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-mqgg9" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.859731 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-mqgg9" event={"ID":"cf02154c-1275-4014-994b-9ab50036a978","Type":"ContainerDied","Data":"334cbff807e4af8fdebb1d782c1c3584076a82595d2316711a243645800abefb"} Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.859753 4810 scope.go:117] "RemoveContainer" containerID="45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.904568 4810 scope.go:117] "RemoveContainer" containerID="45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0" Oct 09 00:35:57 crc kubenswrapper[4810]: E1009 00:35:57.905117 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0\": container with ID starting with 45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0 not found: ID does not exist" containerID="45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.905172 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0"} err="failed to get container status \"45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0\": rpc error: code = NotFound desc = could not find container \"45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0\": container with ID starting with 45d2d96eefe96e9c83da7e216c3fd94b92a72b5e1dce59137c1051d17eed5ae0 not found: ID does not exist" Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.906557 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-mqgg9"] Oct 09 00:35:57 crc kubenswrapper[4810]: I1009 00:35:57.913304 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-mqgg9"] Oct 09 00:35:59 crc kubenswrapper[4810]: I1009 00:35:59.267918 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf02154c-1275-4014-994b-9ab50036a978" path="/var/lib/kubelet/pods/cf02154c-1275-4014-994b-9ab50036a978/volumes" Oct 09 00:36:04 crc kubenswrapper[4810]: I1009 00:36:04.254057 4810 scope.go:117] "RemoveContainer" 
containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:36:04 crc kubenswrapper[4810]: E1009 00:36:04.254609 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.131856 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/util/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.302390 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/util/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.330995 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/pull/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.340703 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/pull/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.484877 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/util/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.519252 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/pull/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.521055 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_695e9552c02c72940c72621f824780f00ca58086c3badc308bf0a2eb69zl594_deccd7e9-1f1d-4950-90fb-05210cea2cff/extract/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.689715 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/util/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.803881 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/util/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.822900 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/pull/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.828762 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/pull/0.log" Oct 09 00:36:05 crc kubenswrapper[4810]: I1009 00:36:05.995398 4810 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/pull/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.003665 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/util/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.006781 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fwbtpf_95a08308-84d4-4109-bb8c-245f2a80eb9e/extract/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.164416 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/util/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.305840 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/pull/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.311712 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/util/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.313124 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/pull/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.463894 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/util/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.498684 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/extract/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.504492 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2dgbqxp_6ea76cac-18f3-4d2b-baec-628d633a9f15/pull/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.639108 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/extract-utilities/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.794457 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/extract-utilities/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.820772 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/extract-content/0.log" Oct 09 00:36:06 crc kubenswrapper[4810]: I1009 00:36:06.860991 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/extract-content/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.005743 4810 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/extract-content/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.011742 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/extract-utilities/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.234536 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/extract-utilities/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.286446 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-lgslb_14c7ea46-7eeb-4c37-84d1-d3072de592c7/registry-server/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.412866 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/extract-content/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.442757 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/extract-utilities/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.449786 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/extract-content/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.602608 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/extract-utilities/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.655276 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/extract-content/0.log" Oct 09 00:36:07 crc kubenswrapper[4810]: I1009 00:36:07.795302 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/util/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.007302 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-p6bh5_7b8d67b8-aebd-42c7-98fe-5730ea71524e/registry-server/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.008588 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/util/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.035688 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/pull/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.096386 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/pull/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.225957 4810 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/util/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.236864 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/pull/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.273672 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_dec6855936450d9745a6250315030408d50c8ea57f37cefa88be37c927m9x6t_6c7a43c8-df69-4c5d-9ae0-1c0fba3717b5/extract/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.300421 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fxcb7_0419ac3c-a0a8-4ff4-80d5-fbfbf5576ddc/marketplace-operator/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.424737 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/extract-utilities/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.566056 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/extract-utilities/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.578270 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/extract-content/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.582585 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/extract-content/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.743472 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/extract-content/0.log" Oct 09 00:36:08 crc kubenswrapper[4810]: I1009 00:36:08.783411 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/extract-utilities/0.log" Oct 09 00:36:09 crc kubenswrapper[4810]: I1009 00:36:09.149926 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-plszl_ff3acf97-a548-45d1-9afe-0d228613d06b/registry-server/0.log" Oct 09 00:36:19 crc kubenswrapper[4810]: I1009 00:36:19.254985 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:36:19 crc kubenswrapper[4810]: E1009 00:36:19.256182 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:36:21 crc kubenswrapper[4810]: I1009 00:36:21.456218 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-296t8_473feb2d-d624-4d00-8a07-965583adf996/prometheus-operator/0.log" Oct 09 00:36:21 crc kubenswrapper[4810]: I1009 00:36:21.600318 4810 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6bbb47dfd-hnrsg_a70271d4-45c7-4ba1-a2ec-35b10e957709/prometheus-operator-admission-webhook/0.log" Oct 09 00:36:21 crc kubenswrapper[4810]: I1009 00:36:21.633557 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6bbb47dfd-w8pwd_79b6b416-cd91-46a8-9f07-6cd6e42ad85d/prometheus-operator-admission-webhook/0.log" Oct 09 00:36:21 crc kubenswrapper[4810]: I1009 00:36:21.790648 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-qz8pk_67646063-cb29-41a3-ae59-baca62fca646/operator/0.log" Oct 09 00:36:21 crc kubenswrapper[4810]: I1009 00:36:21.817598 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-jdbpj_06e31d53-08d6-4cc9-9c5e-b2352bde041f/perses-operator/0.log" Oct 09 00:36:32 crc kubenswrapper[4810]: I1009 00:36:32.254414 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:36:32 crc kubenswrapper[4810]: E1009 00:36:32.255494 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:36:45 crc kubenswrapper[4810]: I1009 00:36:45.254005 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:36:45 crc kubenswrapper[4810]: E1009 00:36:45.254761 4810 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-6752w_openshift-machine-config-operator(e7b43917-aa65-43dc-b71b-7de0af71d3f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-6752w" podUID="e7b43917-aa65-43dc-b71b-7de0af71d3f6" Oct 09 00:36:59 crc kubenswrapper[4810]: I1009 00:36:59.259987 4810 scope.go:117] "RemoveContainer" containerID="50221deaad82b1ff26d349c83d7c85066413eed2bd9e0d56f21c32aecc9a276d" Oct 09 00:37:00 crc kubenswrapper[4810]: I1009 00:37:00.423234 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-6752w" event={"ID":"e7b43917-aa65-43dc-b71b-7de0af71d3f6","Type":"ContainerStarted","Data":"ad26260a7029a455759be78616e974ee249507de618f275e8181cb82eb3026bb"} Oct 09 00:37:03 crc kubenswrapper[4810]: I1009 00:37:03.455219 4810 generic.go:334] "Generic (PLEG): container finished" podID="d9d70225-59f8-4b83-81b1-7fc68a58e740" containerID="f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706" exitCode=0 Oct 09 00:37:03 crc kubenswrapper[4810]: I1009 00:37:03.455345 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" event={"ID":"d9d70225-59f8-4b83-81b1-7fc68a58e740","Type":"ContainerDied","Data":"f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706"} Oct 09 00:37:03 crc kubenswrapper[4810]: I1009 00:37:03.456754 4810 scope.go:117] "RemoveContainer" 
containerID="f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706" Oct 09 00:37:03 crc kubenswrapper[4810]: I1009 00:37:03.881495 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s5ll7_must-gather-q6hd6_d9d70225-59f8-4b83-81b1-7fc68a58e740/gather/0.log" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.395520 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bxzfn"] Oct 09 00:37:06 crc kubenswrapper[4810]: E1009 00:37:06.398634 4810 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf02154c-1275-4014-994b-9ab50036a978" containerName="registry-server" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.398844 4810 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf02154c-1275-4014-994b-9ab50036a978" containerName="registry-server" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.399256 4810 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf02154c-1275-4014-994b-9ab50036a978" containerName="registry-server" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.401310 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.415345 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bxzfn"] Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.506563 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h8dh\" (UniqueName: \"kubernetes.io/projected/0e35ab03-e871-4cc1-b394-87546b2f02cb-kube-api-access-2h8dh\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.506665 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-catalog-content\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.506958 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-utilities\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.608622 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-catalog-content\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.608737 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-utilities\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.608778 4810 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-2h8dh\" (UniqueName: \"kubernetes.io/projected/0e35ab03-e871-4cc1-b394-87546b2f02cb-kube-api-access-2h8dh\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.609611 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-catalog-content\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.609749 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-utilities\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.647460 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h8dh\" (UniqueName: \"kubernetes.io/projected/0e35ab03-e871-4cc1-b394-87546b2f02cb-kube-api-access-2h8dh\") pod \"redhat-operators-bxzfn\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:06 crc kubenswrapper[4810]: I1009 00:37:06.731704 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:07 crc kubenswrapper[4810]: I1009 00:37:07.097091 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bxzfn"] Oct 09 00:37:07 crc kubenswrapper[4810]: I1009 00:37:07.510905 4810 generic.go:334] "Generic (PLEG): container finished" podID="0e35ab03-e871-4cc1-b394-87546b2f02cb" containerID="f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b" exitCode=0 Oct 09 00:37:07 crc kubenswrapper[4810]: I1009 00:37:07.511016 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerDied","Data":"f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b"} Oct 09 00:37:07 crc kubenswrapper[4810]: I1009 00:37:07.512493 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerStarted","Data":"a4c9b902be1fad1bab06034c00ca433ac4d9eb6139a94bbb424b73d199331665"} Oct 09 00:37:08 crc kubenswrapper[4810]: I1009 00:37:08.522705 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerStarted","Data":"64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572"} Oct 09 00:37:09 crc kubenswrapper[4810]: I1009 00:37:09.535958 4810 generic.go:334] "Generic (PLEG): container finished" podID="0e35ab03-e871-4cc1-b394-87546b2f02cb" containerID="64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572" exitCode=0 Oct 09 00:37:09 crc kubenswrapper[4810]: I1009 00:37:09.536067 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" 
event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerDied","Data":"64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572"} Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.005070 4810 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lj4nt"] Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.012146 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.018139 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lj4nt"] Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.168466 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wvq5\" (UniqueName: \"kubernetes.io/projected/3ac2006b-266e-4752-afc8-ed3d05511567-kube-api-access-9wvq5\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.168778 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-utilities\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.168872 4810 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-catalog-content\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.270628 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-catalog-content\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.270734 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wvq5\" (UniqueName: \"kubernetes.io/projected/3ac2006b-266e-4752-afc8-ed3d05511567-kube-api-access-9wvq5\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.270786 4810 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-utilities\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.271429 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-utilities\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 
00:37:10.271777 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-catalog-content\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.300280 4810 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wvq5\" (UniqueName: \"kubernetes.io/projected/3ac2006b-266e-4752-afc8-ed3d05511567-kube-api-access-9wvq5\") pod \"community-operators-lj4nt\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.376504 4810 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.559599 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerStarted","Data":"3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa"} Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.577083 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bxzfn" podStartSLOduration=2.162540088 podStartE2EDuration="4.577063793s" podCreationTimestamp="2025-10-09 00:37:06 +0000 UTC" firstStartedPulling="2025-10-09 00:37:07.513145021 +0000 UTC m=+1825.038783722" lastFinishedPulling="2025-10-09 00:37:09.927668696 +0000 UTC m=+1827.453307427" observedRunningTime="2025-10-09 00:37:10.575328684 +0000 UTC m=+1828.100967385" watchObservedRunningTime="2025-10-09 00:37:10.577063793 +0000 UTC m=+1828.102702494" Oct 09 00:37:10 crc kubenswrapper[4810]: I1009 00:37:10.811237 4810 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lj4nt"] Oct 09 00:37:10 crc kubenswrapper[4810]: W1009 00:37:10.817987 4810 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ac2006b_266e_4752_afc8_ed3d05511567.slice/crio-9d448a06a8b4793b0d3c414b2677a12ab2a60895ee32fcc34f9963b478d06f4c WatchSource:0}: Error finding container 9d448a06a8b4793b0d3c414b2677a12ab2a60895ee32fcc34f9963b478d06f4c: Status 404 returned error can't find the container with id 9d448a06a8b4793b0d3c414b2677a12ab2a60895ee32fcc34f9963b478d06f4c Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.567554 4810 generic.go:334] "Generic (PLEG): container finished" podID="3ac2006b-266e-4752-afc8-ed3d05511567" containerID="ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1" exitCode=0 Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.567614 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lj4nt" event={"ID":"3ac2006b-266e-4752-afc8-ed3d05511567","Type":"ContainerDied","Data":"ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1"} Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.567959 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lj4nt" event={"ID":"3ac2006b-266e-4752-afc8-ed3d05511567","Type":"ContainerStarted","Data":"9d448a06a8b4793b0d3c414b2677a12ab2a60895ee32fcc34f9963b478d06f4c"} Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 
00:37:11.577862 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-s5ll7/must-gather-q6hd6"] Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.578099 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" podUID="d9d70225-59f8-4b83-81b1-7fc68a58e740" containerName="copy" containerID="cri-o://0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97" gracePeriod=2 Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.584655 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-s5ll7/must-gather-q6hd6"] Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.940431 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s5ll7_must-gather-q6hd6_d9d70225-59f8-4b83-81b1-7fc68a58e740/copy/0.log" Oct 09 00:37:11 crc kubenswrapper[4810]: I1009 00:37:11.941144 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.006267 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9d70225-59f8-4b83-81b1-7fc68a58e740-must-gather-output\") pod \"d9d70225-59f8-4b83-81b1-7fc68a58e740\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.006434 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z2qh\" (UniqueName: \"kubernetes.io/projected/d9d70225-59f8-4b83-81b1-7fc68a58e740-kube-api-access-7z2qh\") pod \"d9d70225-59f8-4b83-81b1-7fc68a58e740\" (UID: \"d9d70225-59f8-4b83-81b1-7fc68a58e740\") " Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.015421 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9d70225-59f8-4b83-81b1-7fc68a58e740-kube-api-access-7z2qh" (OuterVolumeSpecName: "kube-api-access-7z2qh") pod "d9d70225-59f8-4b83-81b1-7fc68a58e740" (UID: "d9d70225-59f8-4b83-81b1-7fc68a58e740"). InnerVolumeSpecName "kube-api-access-7z2qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.064662 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9d70225-59f8-4b83-81b1-7fc68a58e740-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "d9d70225-59f8-4b83-81b1-7fc68a58e740" (UID: "d9d70225-59f8-4b83-81b1-7fc68a58e740"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.108235 4810 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d9d70225-59f8-4b83-81b1-7fc68a58e740-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.108293 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z2qh\" (UniqueName: \"kubernetes.io/projected/d9d70225-59f8-4b83-81b1-7fc68a58e740-kube-api-access-7z2qh\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.576204 4810 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s5ll7_must-gather-q6hd6_d9d70225-59f8-4b83-81b1-7fc68a58e740/copy/0.log" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.576599 4810 generic.go:334] "Generic (PLEG): container finished" podID="d9d70225-59f8-4b83-81b1-7fc68a58e740" containerID="0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97" exitCode=143 Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.576651 4810 scope.go:117] "RemoveContainer" containerID="0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.576667 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s5ll7/must-gather-q6hd6" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.591691 4810 scope.go:117] "RemoveContainer" containerID="f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.632728 4810 scope.go:117] "RemoveContainer" containerID="0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97" Oct 09 00:37:12 crc kubenswrapper[4810]: E1009 00:37:12.634357 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97\": container with ID starting with 0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97 not found: ID does not exist" containerID="0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.634399 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97"} err="failed to get container status \"0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97\": rpc error: code = NotFound desc = could not find container \"0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97\": container with ID starting with 0b8ec41863ec3270b6c41e6632a6fc47c76668e38154f66cca02022bb3077b97 not found: ID does not exist" Oct 09 00:37:12 crc kubenswrapper[4810]: I1009 00:37:12.634440 4810 scope.go:117] "RemoveContainer" containerID="f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706" Oct 09 00:37:12 crc kubenswrapper[4810]: E1009 00:37:12.634723 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706\": container with ID starting with f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706 not found: ID does not exist" containerID="f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706" Oct 09 00:37:12 crc 
kubenswrapper[4810]: I1009 00:37:12.634756 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706"} err="failed to get container status \"f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706\": rpc error: code = NotFound desc = could not find container \"f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706\": container with ID starting with f6662e8763283d609841b7d444e10749d521bd450a627adf66b149490c069706 not found: ID does not exist" Oct 09 00:37:13 crc kubenswrapper[4810]: I1009 00:37:13.267625 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9d70225-59f8-4b83-81b1-7fc68a58e740" path="/var/lib/kubelet/pods/d9d70225-59f8-4b83-81b1-7fc68a58e740/volumes" Oct 09 00:37:13 crc kubenswrapper[4810]: I1009 00:37:13.588913 4810 generic.go:334] "Generic (PLEG): container finished" podID="3ac2006b-266e-4752-afc8-ed3d05511567" containerID="213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e" exitCode=0 Oct 09 00:37:13 crc kubenswrapper[4810]: I1009 00:37:13.588977 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lj4nt" event={"ID":"3ac2006b-266e-4752-afc8-ed3d05511567","Type":"ContainerDied","Data":"213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e"} Oct 09 00:37:14 crc kubenswrapper[4810]: I1009 00:37:14.599596 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lj4nt" event={"ID":"3ac2006b-266e-4752-afc8-ed3d05511567","Type":"ContainerStarted","Data":"9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed"} Oct 09 00:37:14 crc kubenswrapper[4810]: I1009 00:37:14.623099 4810 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lj4nt" podStartSLOduration=3.095230961 podStartE2EDuration="5.623075903s" podCreationTimestamp="2025-10-09 00:37:09 +0000 UTC" firstStartedPulling="2025-10-09 00:37:11.569120274 +0000 UTC m=+1829.094758985" lastFinishedPulling="2025-10-09 00:37:14.096965226 +0000 UTC m=+1831.622603927" observedRunningTime="2025-10-09 00:37:14.616100514 +0000 UTC m=+1832.141739235" watchObservedRunningTime="2025-10-09 00:37:14.623075903 +0000 UTC m=+1832.148714614" Oct 09 00:37:16 crc kubenswrapper[4810]: I1009 00:37:16.732861 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:16 crc kubenswrapper[4810]: I1009 00:37:16.733336 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:16 crc kubenswrapper[4810]: I1009 00:37:16.789380 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:17 crc kubenswrapper[4810]: I1009 00:37:17.680063 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:20 crc kubenswrapper[4810]: I1009 00:37:20.378204 4810 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:20 crc kubenswrapper[4810]: I1009 00:37:20.378723 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:20 crc kubenswrapper[4810]: I1009 00:37:20.462572 4810 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:20 crc kubenswrapper[4810]: I1009 00:37:20.715647 4810 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:20 crc kubenswrapper[4810]: I1009 00:37:20.779817 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bxzfn"] Oct 09 00:37:20 crc kubenswrapper[4810]: I1009 00:37:20.780296 4810 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bxzfn" podUID="0e35ab03-e871-4cc1-b394-87546b2f02cb" containerName="registry-server" containerID="cri-o://3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa" gracePeriod=2 Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.247968 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.347268 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-catalog-content\") pod \"0e35ab03-e871-4cc1-b394-87546b2f02cb\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.347580 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h8dh\" (UniqueName: \"kubernetes.io/projected/0e35ab03-e871-4cc1-b394-87546b2f02cb-kube-api-access-2h8dh\") pod \"0e35ab03-e871-4cc1-b394-87546b2f02cb\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.347599 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-utilities\") pod \"0e35ab03-e871-4cc1-b394-87546b2f02cb\" (UID: \"0e35ab03-e871-4cc1-b394-87546b2f02cb\") " Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.348830 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-utilities" (OuterVolumeSpecName: "utilities") pod "0e35ab03-e871-4cc1-b394-87546b2f02cb" (UID: "0e35ab03-e871-4cc1-b394-87546b2f02cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.352662 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e35ab03-e871-4cc1-b394-87546b2f02cb-kube-api-access-2h8dh" (OuterVolumeSpecName: "kube-api-access-2h8dh") pod "0e35ab03-e871-4cc1-b394-87546b2f02cb" (UID: "0e35ab03-e871-4cc1-b394-87546b2f02cb"). InnerVolumeSpecName "kube-api-access-2h8dh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.437238 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0e35ab03-e871-4cc1-b394-87546b2f02cb" (UID: "0e35ab03-e871-4cc1-b394-87546b2f02cb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.449752 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.449803 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h8dh\" (UniqueName: \"kubernetes.io/projected/0e35ab03-e871-4cc1-b394-87546b2f02cb-kube-api-access-2h8dh\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.449816 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0e35ab03-e871-4cc1-b394-87546b2f02cb-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.668417 4810 generic.go:334] "Generic (PLEG): container finished" podID="0e35ab03-e871-4cc1-b394-87546b2f02cb" containerID="3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa" exitCode=0 Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.668483 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bxzfn" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.668519 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerDied","Data":"3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa"} Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.669087 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bxzfn" event={"ID":"0e35ab03-e871-4cc1-b394-87546b2f02cb","Type":"ContainerDied","Data":"a4c9b902be1fad1bab06034c00ca433ac4d9eb6139a94bbb424b73d199331665"} Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.669123 4810 scope.go:117] "RemoveContainer" containerID="3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.692744 4810 scope.go:117] "RemoveContainer" containerID="64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.716967 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bxzfn"] Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.726147 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bxzfn"] Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.768345 4810 scope.go:117] "RemoveContainer" containerID="f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.809648 4810 scope.go:117] "RemoveContainer" containerID="3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa" Oct 09 00:37:21 crc kubenswrapper[4810]: E1009 00:37:21.811684 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa\": container with ID starting with 3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa not found: ID does not exist" containerID="3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.811754 4810 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa"} err="failed to get container status \"3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa\": rpc error: code = NotFound desc = could not find container \"3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa\": container with ID starting with 3d360100c6294e7aaef44c5a61ee789f0a84b50f5a6996a66d7db7df81a154fa not found: ID does not exist" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.811797 4810 scope.go:117] "RemoveContainer" containerID="64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572" Oct 09 00:37:21 crc kubenswrapper[4810]: E1009 00:37:21.812365 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572\": container with ID starting with 64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572 not found: ID does not exist" containerID="64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.812399 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572"} err="failed to get container status \"64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572\": rpc error: code = NotFound desc = could not find container \"64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572\": container with ID starting with 64562bcb979fb6337b79d3f1bc3be09757d161c37aaf7a7f2cb52bf66be38572 not found: ID does not exist" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.812445 4810 scope.go:117] "RemoveContainer" containerID="f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b" Oct 09 00:37:21 crc kubenswrapper[4810]: E1009 00:37:21.812867 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b\": container with ID starting with f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b not found: ID does not exist" containerID="f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b" Oct 09 00:37:21 crc kubenswrapper[4810]: I1009 00:37:21.812912 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b"} err="failed to get container status \"f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b\": rpc error: code = NotFound desc = could not find container \"f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b\": container with ID starting with f61d82a1a1c52d8e1a5d94e3e3d3aa9199fe694c6a9e7d4552fed191907b6d9b not found: ID does not exist" Oct 09 00:37:23 crc kubenswrapper[4810]: I1009 00:37:23.269934 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e35ab03-e871-4cc1-b394-87546b2f02cb" path="/var/lib/kubelet/pods/0e35ab03-e871-4cc1-b394-87546b2f02cb/volumes" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.173774 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lj4nt"] Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.173989 4810 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/community-operators-lj4nt" podUID="3ac2006b-266e-4752-afc8-ed3d05511567" containerName="registry-server" containerID="cri-o://9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed" gracePeriod=2 Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.632953 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.704114 4810 generic.go:334] "Generic (PLEG): container finished" podID="3ac2006b-266e-4752-afc8-ed3d05511567" containerID="9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed" exitCode=0 Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.704172 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lj4nt" event={"ID":"3ac2006b-266e-4752-afc8-ed3d05511567","Type":"ContainerDied","Data":"9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed"} Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.704252 4810 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lj4nt" event={"ID":"3ac2006b-266e-4752-afc8-ed3d05511567","Type":"ContainerDied","Data":"9d448a06a8b4793b0d3c414b2677a12ab2a60895ee32fcc34f9963b478d06f4c"} Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.704198 4810 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lj4nt" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.704284 4810 scope.go:117] "RemoveContainer" containerID="9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.713194 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-utilities\") pod \"3ac2006b-266e-4752-afc8-ed3d05511567\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.713451 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-catalog-content\") pod \"3ac2006b-266e-4752-afc8-ed3d05511567\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.713511 4810 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wvq5\" (UniqueName: \"kubernetes.io/projected/3ac2006b-266e-4752-afc8-ed3d05511567-kube-api-access-9wvq5\") pod \"3ac2006b-266e-4752-afc8-ed3d05511567\" (UID: \"3ac2006b-266e-4752-afc8-ed3d05511567\") " Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.715307 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-utilities" (OuterVolumeSpecName: "utilities") pod "3ac2006b-266e-4752-afc8-ed3d05511567" (UID: "3ac2006b-266e-4752-afc8-ed3d05511567"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.727233 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ac2006b-266e-4752-afc8-ed3d05511567-kube-api-access-9wvq5" (OuterVolumeSpecName: "kube-api-access-9wvq5") pod "3ac2006b-266e-4752-afc8-ed3d05511567" (UID: "3ac2006b-266e-4752-afc8-ed3d05511567"). InnerVolumeSpecName "kube-api-access-9wvq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.729453 4810 scope.go:117] "RemoveContainer" containerID="213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.750921 4810 scope.go:117] "RemoveContainer" containerID="ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.781134 4810 scope.go:117] "RemoveContainer" containerID="9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed" Oct 09 00:37:24 crc kubenswrapper[4810]: E1009 00:37:24.781672 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed\": container with ID starting with 9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed not found: ID does not exist" containerID="9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.781729 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed"} err="failed to get container status \"9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed\": rpc error: code = NotFound desc = could not find container \"9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed\": container with ID starting with 9e537caf71cf387458e8b4cc853e58a94beea1c39f030242c53610b711ae0fed not found: ID does not exist" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.781751 4810 scope.go:117] "RemoveContainer" containerID="213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.781860 4810 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ac2006b-266e-4752-afc8-ed3d05511567" (UID: "3ac2006b-266e-4752-afc8-ed3d05511567"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 00:37:24 crc kubenswrapper[4810]: E1009 00:37:24.782246 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e\": container with ID starting with 213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e not found: ID does not exist" containerID="213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.782279 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e"} err="failed to get container status \"213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e\": rpc error: code = NotFound desc = could not find container \"213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e\": container with ID starting with 213e68212f7e67ae20f9c49a757dd60349809ad93365db3f2f2493cc62a2d70e not found: ID does not exist" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.782298 4810 scope.go:117] "RemoveContainer" containerID="ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1" Oct 09 00:37:24 crc kubenswrapper[4810]: E1009 00:37:24.782589 4810 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1\": container with ID starting with ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1 not found: ID does not exist" containerID="ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.782624 4810 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1"} err="failed to get container status \"ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1\": rpc error: code = NotFound desc = could not find container \"ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1\": container with ID starting with ad019b4059a9906a3f5b3f16ebf252252523fa64485ba9ac42263a8e187524a1 not found: ID does not exist" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.816007 4810 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.816401 4810 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wvq5\" (UniqueName: \"kubernetes.io/projected/3ac2006b-266e-4752-afc8-ed3d05511567-kube-api-access-9wvq5\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:24 crc kubenswrapper[4810]: I1009 00:37:24.816427 4810 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ac2006b-266e-4752-afc8-ed3d05511567-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 00:37:25 crc kubenswrapper[4810]: I1009 00:37:25.060747 4810 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lj4nt"] Oct 09 00:37:25 crc kubenswrapper[4810]: I1009 00:37:25.069870 4810 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lj4nt"] Oct 09 00:37:25 crc kubenswrapper[4810]: I1009 
00:37:25.269655 4810 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ac2006b-266e-4752-afc8-ed3d05511567" path="/var/lib/kubelet/pods/3ac2006b-266e-4752-afc8-ed3d05511567/volumes"